|
| 1 | +package server |
| 2 | + |
import (
	"bytes"
	"compress/gzip"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"net/http/httputil"
	"strconv"
	"strings"
	"time"

	"github.com/gptscript-ai/chat-completion-client"
)
| 17 | + |
| 18 | +func Run(obotHost, port string) error { |
| 19 | + mux := http.NewServeMux() |
| 20 | + |
| 21 | + s := &server{ |
| 22 | + obotHost: obotHost, |
| 23 | + port: port, |
| 24 | + } |
| 25 | + |
| 26 | + mux.HandleFunc("/{$}", s.healthz) |
| 27 | + mux.Handle("GET /v1/models", &httputil.ReverseProxy{ |
| 28 | + Director: s.proxy("/api"), |
| 29 | + ModifyResponse: s.rewriteModelsResponse, |
| 30 | + }) |
| 31 | + mux.Handle("/{path...}", &httputil.ReverseProxy{ |
| 32 | + Director: s.proxy("/api/llm-proxy"), |
| 33 | + }) |
| 34 | + |
| 35 | + httpServer := &http.Server{ |
| 36 | + Addr: "127.0.0.1:" + port, |
| 37 | + Handler: mux, |
| 38 | + } |
| 39 | + |
| 40 | + if err := httpServer.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { |
| 41 | + return err |
| 42 | + } |
| 43 | + |
| 44 | + return nil |
| 45 | +} |
| 46 | + |
// server holds the configuration for the local OpenAI-compatible proxy:
// the Obot host requests are forwarded to and the local port this proxy
// listens on (used to report its own address from healthz).
type server struct {
	obotHost, port string
}
| 50 | + |
| 51 | +func (s *server) healthz(w http.ResponseWriter, _ *http.Request) { |
| 52 | + _, _ = w.Write([]byte("http://127.0.0.1:" + s.port)) |
| 53 | +} |
| 54 | + |
| 55 | +func (s *server) rewriteModelsResponse(resp *http.Response) error { |
| 56 | + if resp.StatusCode != http.StatusOK { |
| 57 | + return nil |
| 58 | + } |
| 59 | + |
| 60 | + originalBody := resp.Body |
| 61 | + defer originalBody.Close() |
| 62 | + |
| 63 | + if resp.Header.Get("Content-Encoding") == "gzip" { |
| 64 | + var err error |
| 65 | + originalBody, err = gzip.NewReader(originalBody) |
| 66 | + if err != nil { |
| 67 | + return fmt.Errorf("failed to create gzip reader: %w", err) |
| 68 | + } |
| 69 | + defer originalBody.Close() |
| 70 | + resp.Header.Del("Content-Encoding") |
| 71 | + } |
| 72 | + |
| 73 | + var models modelList |
| 74 | + if err := json.NewDecoder(originalBody).Decode(&models); err != nil { |
| 75 | + return fmt.Errorf("failed to decode models response: %w, %d, %v", err, resp.StatusCode, resp.Header) |
| 76 | + } |
| 77 | + |
| 78 | + respModels := make([]openai.Model, 0, len(models.Items)) |
| 79 | + var createdTimestamp int64 |
| 80 | + for _, model := range models.Items { |
| 81 | + createdTimestamp = 0 |
| 82 | + if created, ok := model["created"].(string); ok { |
| 83 | + if createdAt, err := time.Parse(time.RFC3339, created); err == nil { |
| 84 | + createdTimestamp = createdAt.Unix() |
| 85 | + } |
| 86 | + } |
| 87 | + respModels = append(respModels, openai.Model{ |
| 88 | + CreatedAt: createdTimestamp, |
| 89 | + ID: model["id"].(string), |
| 90 | + Object: "model", |
| 91 | + Metadata: map[string]string{ |
| 92 | + "usage": model["usage"].(string), |
| 93 | + }, |
| 94 | + }) |
| 95 | + } |
| 96 | + |
| 97 | + b, err := json.Marshal(openai.ModelsList{Models: respModels}) |
| 98 | + if err != nil { |
| 99 | + return fmt.Errorf("failed to marshal models response: %w", err) |
| 100 | + } |
| 101 | + |
| 102 | + resp.Body = io.NopCloser(bytes.NewReader(b)) |
| 103 | + resp.Header.Set("Content-Length", fmt.Sprintf("%d", len(b))) |
| 104 | + return nil |
| 105 | +} |
| 106 | + |
| 107 | +func (s *server) proxy(prefix string) func(req *http.Request) { |
| 108 | + return func(req *http.Request) { |
| 109 | + req.URL.Host = s.obotHost |
| 110 | + req.URL.Scheme = "http" |
| 111 | + req.Host = req.URL.Host |
| 112 | + req.URL.Path = prefix + strings.TrimPrefix(req.URL.Path, "/v1") |
| 113 | + |
| 114 | + if apiKey := getAPIKey(req); apiKey != "" { |
| 115 | + req.Header.Set("Authorization", "Bearer "+apiKey) |
| 116 | + } |
| 117 | + } |
| 118 | +} |
| 119 | + |
| 120 | +func getAPIKey(req *http.Request) string { |
| 121 | + envHeader := req.Header.Get("X-GPTScript-Env") |
| 122 | + if envHeader == "" { |
| 123 | + return "" |
| 124 | + } |
| 125 | + |
| 126 | + for _, env := range strings.Split(envHeader, ",") { |
| 127 | + if strings.HasPrefix(env, "GPTSCRIPT_MODEL_PROVIDER_TOKEN=") { |
| 128 | + return strings.TrimSpace(strings.TrimPrefix(env, "GPTSCRIPT_MODEL_PROVIDER_TOKEN=")) |
| 129 | + } |
| 130 | + } |
| 131 | + |
| 132 | + return "" |
| 133 | +} |
| 134 | + |
// modelList mirrors the Obot API's model-list payload: a JSON object with
// an "items" array of loosely-typed model records (keys observed in use:
// "id", "created", "usage").
type modelList struct {
	Items []map[string]any `json:"items"`
}
0 commit comments