Commit f1c0f8d

enhance: add settings for skip tls cert check
1 parent 013d810 commit f1c0f8d
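
The substantive change here is that the proxy transport's previously hardcoded InsecureSkipVerify: true now reads from settings.ServerSettings.InsecureSkipVerify, so skipping TLS certificate checks becomes an explicit setting rather than always-on behavior. A hypothetical sketch of how such a flag might be declared on the settings side follows; the struct shape, ini tag, and default value are illustrative assumptions, not the repo's actual code:

package settings

// Hypothetical sketch: a server settings struct exposing the new flag.
// The real Nginx-UI settings package may declare and load this differently;
// the ini tag and the default shown here are assumptions for illustration.
type Server struct {
	// InsecureSkipVerify disables TLS certificate verification for the
	// outbound HTTP client used to reach the OpenAI API through a proxy.
	InsecureSkipVerify bool `ini:"InsecureSkipVerify"`
}

// ServerSettings is the package-level instance referenced as
// settings.ServerSettings.InsecureSkipVerify in api/openai/openai.go.
var ServerSettings = Server{
	InsecureSkipVerify: false, // assumed default: keep certificate checks on
}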

File tree

8 files changed: +442 -436 lines changed


api/openai/openai.go

Lines changed: 119 additions & 119 deletions
@@ -1,18 +1,18 @@
package openai

import (
    "context"
    "crypto/tls"
    "fmt"
    "github.com/0xJacky/Nginx-UI/api"
    "github.com/0xJacky/Nginx-UI/internal/chatbot"
    "github.com/0xJacky/Nginx-UI/settings"
    "github.com/gin-gonic/gin"
    "github.com/pkg/errors"
    "github.com/sashabaranov/go-openai"
    "io"
    "net/http"
    "net/url"
)

const ChatGPTInitPrompt = `You are a assistant who can help users write and optimise the configurations of Nginx,

@@ -22,111 +22,111 @@ Later the language environment depends on the user message.
The first reply should involve the key information of the file and ask user what can you help them.`

func MakeChatCompletionRequest(c *gin.Context) {
    var json struct {
        Filepath string                         `json:"filepath"`
        Messages []openai.ChatCompletionMessage `json:"messages"`
    }

    if !api.BindAndValid(c, &json) {
        return
    }

    messages := []openai.ChatCompletionMessage{
        {
            Role:    openai.ChatMessageRoleSystem,
            Content: ChatGPTInitPrompt,
        },
    }

    messages = append(messages, json.Messages...)

    if json.Filepath != "" {
        messages = chatbot.ChatCompletionWithContext(json.Filepath, messages)
    }

    // SSE server
    c.Writer.Header().Set("Content-Type", "text/event-stream; charset=utf-8")
    c.Writer.Header().Set("Cache-Control", "no-cache")
    c.Writer.Header().Set("Connection", "keep-alive")
    c.Writer.Header().Set("Access-Control-Allow-Origin", "*")

    config := openai.DefaultConfig(settings.OpenAISettings.Token)

    if settings.OpenAISettings.Proxy != "" {
        proxyUrl, err := url.Parse(settings.OpenAISettings.Proxy)
        if err != nil {
            c.Stream(func(w io.Writer) bool {
                c.SSEvent("message", gin.H{
                    "type":    "error",
                    "content": err.Error(),
                })
                return false
            })
            return
        }
        transport := &http.Transport{
            Proxy:           http.ProxyURL(proxyUrl),
-           TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+           TLSClientConfig: &tls.Config{InsecureSkipVerify: settings.ServerSettings.InsecureSkipVerify},
        }
        config.HTTPClient = &http.Client{
            Transport: transport,
        }
    }

    if settings.OpenAISettings.BaseUrl != "" {
        config.BaseURL = settings.OpenAISettings.BaseUrl
    }

    openaiClient := openai.NewClientWithConfig(config)
    ctx := context.Background()

    req := openai.ChatCompletionRequest{
        Model:    settings.OpenAISettings.Model,
        Messages: messages,
        Stream:   true,
    }
    stream, err := openaiClient.CreateChatCompletionStream(ctx, req)
    if err != nil {
        fmt.Printf("CompletionStream error: %v\n", err)
        c.Stream(func(w io.Writer) bool {
            c.SSEvent("message", gin.H{
                "type":    "error",
                "content": err.Error(),
            })
            return false
        })
        return
    }
    defer stream.Close()
    msgChan := make(chan string)
    go func() {
        defer close(msgChan)
        for {
            response, err := stream.Recv()
            if errors.Is(err, io.EOF) {
                fmt.Println()
                return
            }

            if err != nil {
                fmt.Printf("Stream error: %v\n", err)
                return
            }

            message := fmt.Sprintf("%s", response.Choices[0].Delta.Content)

            msgChan <- message
        }
    }()

    c.Stream(func(w io.Writer) bool {
        if m, ok := <-msgChan; ok {
            c.SSEvent("message", gin.H{
                "type":    "message",
                "content": m,
            })
            return true
        }
        return false
    })
}
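
The only substantive change in this file is the TLS flag on the proxy transport; the remaining paired additions and deletions appear to be indentation-only, which accounts for the 119/119 line counts. For context, here is a minimal standalone sketch of what toggling that flag does to an outbound client, using only the standard library; the helper name, proxy address, and variable names are illustrative and not from the repo:

package main

import (
    "crypto/tls"
    "fmt"
    "net/http"
    "net/url"
)

// buildClient mirrors the pattern used in MakeChatCompletionRequest:
// route requests through a proxy and optionally skip TLS certificate
// verification. skipVerify plays the role of
// settings.ServerSettings.InsecureSkipVerify.
func buildClient(proxy string, skipVerify bool) (*http.Client, error) {
    proxyURL, err := url.Parse(proxy)
    if err != nil {
        return nil, err
    }
    return &http.Client{
        Transport: &http.Transport{
            Proxy: http.ProxyURL(proxyURL),
            // When skipVerify is true, the client accepts any server
            // certificate (convenient behind self-signed intercepting
            // proxies, but unsafe on untrusted networks); when false,
            // normal certificate verification applies.
            TLSClientConfig: &tls.Config{InsecureSkipVerify: skipVerify},
        },
    }, nil
}

func main() {
    // Illustrative proxy address; verification stays enabled here.
    client, err := buildClient("http://127.0.0.1:7890", false)
    if err != nil {
        panic(err)
    }
    fmt.Printf("client ready: %T\n", client.Transport)
}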
