Skip to content

Commit fb06251

Browse files
authored
enhance: add support for streaming with o1 model (#461)
Signed-off-by: Donnie Adams <[email protected]>
1 parent 7852f90 commit fb06251

File tree

2 files changed

+1
-64
lines changed

2 files changed

+1
-64
lines changed

openai-model-provider/main.go

+1-2
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,7 @@ func main() {
3333

3434
openaiProxy := openaiproxy.NewServer(cfg)
3535
reverseProxy := &httputil.ReverseProxy{
36-
Director: openaiProxy.Openaiv1ProxyRedirect,
37-
ModifyResponse: openaiProxy.ModifyResponse,
36+
Director: openaiProxy.Openaiv1ProxyRedirect,
3837
}
3938
cfg.CustomPathHandleFuncs["/v1/"] = reverseProxy.ServeHTTP
4039

openai-model-provider/openaiproxy/proxy.go

-62
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,6 @@ func (s *Server) Openaiv1ProxyRedirect(req *http.Request) {
5050
}
5151

5252
func modifyRequestBodyForO1(req *http.Request, reqBody *openai.ChatCompletionRequest) error {
53-
reqBody.Stream = false
5453
reqBody.Temperature = nil
5554
for i, msg := range reqBody.Messages {
5655
if msg.Role == "system" {
@@ -63,67 +62,6 @@ func modifyRequestBodyForO1(req *http.Request, reqBody *openai.ChatCompletionReq
6362
}
6463
req.Body = io.NopCloser(bytes.NewBuffer(modifiedBodyBytes))
6564
req.ContentLength = int64(len(modifiedBodyBytes))
66-
req.Header.Set("Accept", "application/json")
67-
req.Header.Set("Accept-Encoding", "")
68-
req.Header.Set("Content-Type", "application/json")
69-
return nil
70-
}
71-
72-
func (s *Server) ModifyResponse(resp *http.Response) error {
73-
if resp.StatusCode != http.StatusOK || resp.Request.URL.Path != proxy.ChatCompletionsPath || resp.Request.URL.Host != proxy.OpenaiBaseHostName {
74-
return nil
75-
}
76-
77-
if resp.Header.Get("Content-Type") == "application/json" {
78-
rawBody, err := io.ReadAll(resp.Body)
79-
if err != nil {
80-
resp.Body.Close()
81-
return fmt.Errorf("failed to read response body: %w", err)
82-
}
83-
resp.Body.Close()
84-
var respBody openai.ChatCompletionResponse
85-
if err := json.Unmarshal(rawBody, &respBody); err == nil && isModelO1(respBody.Model) {
86-
// Convert non-streaming response to a single SSE for o1 model
87-
streamResponse := openai.ChatCompletionStreamResponse{
88-
ID: respBody.ID,
89-
Object: respBody.Object,
90-
Created: respBody.Created,
91-
Model: respBody.Model,
92-
Usage: respBody.Usage,
93-
Choices: func() []openai.ChatCompletionStreamChoice {
94-
var choices []openai.ChatCompletionStreamChoice
95-
for _, choice := range respBody.Choices {
96-
choices = append(choices, openai.ChatCompletionStreamChoice{
97-
Index: choice.Index,
98-
Delta: openai.ChatCompletionStreamChoiceDelta{
99-
Content: choice.Message.Content,
100-
Role: choice.Message.Role,
101-
FunctionCall: choice.Message.FunctionCall,
102-
ToolCalls: choice.Message.ToolCalls,
103-
},
104-
FinishReason: choice.FinishReason,
105-
})
106-
}
107-
return choices
108-
}(),
109-
}
110-
111-
sseData, err := json.Marshal(streamResponse)
112-
if err != nil {
113-
return fmt.Errorf("failed to marshal stream response: %w", err)
114-
}
115-
116-
sseFormattedData := fmt.Sprintf("data: %s\n\nevent: close\ndata: [DONE]\n\n", sseData)
117-
118-
resp.Header.Set("Content-Type", "text/event-stream")
119-
resp.Header.Set("Cache-Control", "no-cache")
120-
resp.Header.Set("Connection", "keep-alive")
121-
resp.Body = io.NopCloser(bytes.NewBufferString(sseFormattedData))
122-
} else {
123-
resp.Body = io.NopCloser(bytes.NewBuffer(rawBody))
124-
}
125-
}
126-
12765
return nil
12866
}
12967

0 commit comments

Comments (0)