-import json
 import time
 from abc import ABC, abstractmethod
 from typing import Dict, Tuple

@@ -9,8 +8,12 @@
 from codegate.pipeline.base import PipelineContext, PipelineResult, SequentialPipelineProcessor
 from codegate.pipeline.factory import PipelineFactory
 from codegate.providers.normalizer.completion import CompletionNormalizer
-from codegate.types.common import Delta, ModelResponse, StreamingChoices
-from codegate.types.openai import ChatCompletionRequest
+from codegate.types.openai import (
+    ChatCompletionRequest,
+    ChoiceDelta,
+    MessageDelta,
+    StreamingChatCompletion,
+)

 logger = structlog.get_logger("codegate")

@@ -69,18 +72,21 @@ def _get_copilot_headers(headers: Dict[str, str]) -> Dict[str, str]:
         return copilot_headers

     @staticmethod
-    def _create_shortcut_response(result: PipelineResult, model: str) -> bytes:
-        response = ModelResponse(
+    def _create_shortcut_response(result: PipelineResult) -> bytes:
+        response = StreamingChatCompletion(
+            id="",
             choices=[
-                StreamingChoices(
+                ChoiceDelta(
                     finish_reason="stop",
                     index=0,
-                    delta=Delta(content=result.response.content, role="assistant"),
-                )
+                    delta=MessageDelta(
+                        content=result.response.content,
+                        role="assistant"),
+                ),
             ],
-            created=int(time.time()),
-            model=model,
-            stream=True,
+            created=int(time.time()),
+            model=result.response.model,
+            object="chat.completion.chunk",
         )
         body = response.model_dump_json(exclude_none=True, exclude_unset=True).encode()
         return body

@@ -122,7 +128,7 @@ async def process_body(
         try:
             # Return shortcut response to the user
             body = CopilotPipeline._create_shortcut_response(
-                result, normalized_body.model,
+                result,
             )
             logger.info(f"Pipeline created shortcut response: {body}")
             return body, result.context
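
For reference, here is a minimal standalone sketch (not part of the diff) of the payload shape the new _create_shortcut_response should emit, assuming StreamingChatCompletion serializes to the standard OpenAI chat.completion.chunk schema; the model and content strings below are illustrative placeholders.

import json
import time

# Sketch only: mirrors the fields set in the PR (id, object, created, model,
# choices[0].delta); literal values here are placeholders.
chunk = {
    "id": "",
    "object": "chat.completion.chunk",
    "created": int(time.time()),
    "model": "placeholder-model",  # taken from result.response.model in the PR
    "choices": [
        {
            "index": 0,
            "finish_reason": "stop",
            "delta": {"role": "assistant", "content": "shortcut reply"},
        }
    ],
}

body = json.dumps(chunk, separators=(",", ":")).encode()
print(body)  # roughly the bytes returned to the client as the shortcut response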