Skip to content

Commit 4b351ac

Browse files
committed
chore: use a Go-based tool for obot model provider
This should improve performance when using this model provider.

Signed-off-by: Donnie Adams <donnie@acorn.io>
1 parent 4f5695d commit 4b351ac

File tree

7 files changed

+170
-119
lines changed

7 files changed

+170
-119
lines changed

obot-model-provider/go.mod

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
module github.com/obot-platform/tools/obot-model-provider
2+
3+
go 1.23.4
4+
5+
require github.com/gptscript-ai/chat-completion-client v0.0.0-20241216203633-5c0178fb89ed

obot-model-provider/go.sum

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
github.com/gptscript-ai/chat-completion-client v0.0.0-20241216203633-5c0178fb89ed h1:qMHm0IYpKgmw4KHX76RMB/duSICxo7IZuimPCKb0qG4=
2+
github.com/gptscript-ai/chat-completion-client v0.0.0-20241216203633-5c0178fb89ed/go.mod h1:7P/o6/IWa1KqsntVf68hSnLKuu3+xuqm6lYhch1w4jo=

obot-model-provider/main.go

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
package main
2+
3+
import (
4+
"os"
5+
6+
"github.com/obot-platform/tools/obot-model-provider/server"
7+
)
8+
9+
func main() {
10+
obotHost := os.Getenv("OBOT_URL")
11+
if obotHost == "" {
12+
obotHost = "localhost:8080"
13+
}
14+
15+
port := os.Getenv("PORT")
16+
if port == "" {
17+
port = "8000"
18+
}
19+
20+
if err := server.Run(obotHost, port); err != nil {
21+
panic(err)
22+
}
23+
}

obot-model-provider/main.py

Lines changed: 0 additions & 113 deletions
This file was deleted.

obot-model-provider/requirements.txt

Lines changed: 0 additions & 3 deletions
This file was deleted.
Lines changed: 137 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,137 @@
1+
package server
2+
3+
import (
4+
"bytes"
5+
"compress/gzip"
6+
"encoding/json"
7+
"errors"
8+
"fmt"
9+
"io"
10+
"net/http"
11+
"net/http/httputil"
12+
"strings"
13+
"time"
14+
15+
"github.com/gptscript-ai/chat-completion-client"
16+
)
17+
18+
func Run(obotHost, port string) error {
19+
mux := http.NewServeMux()
20+
21+
s := &server{
22+
obotHost: obotHost,
23+
port: port,
24+
}
25+
26+
mux.HandleFunc("/{$}", s.healthz)
27+
mux.Handle("GET /v1/models", &httputil.ReverseProxy{
28+
Director: s.proxy("/api"),
29+
ModifyResponse: s.rewriteModelsResponse,
30+
})
31+
mux.Handle("/{path...}", &httputil.ReverseProxy{
32+
Director: s.proxy("/api/llm-proxy"),
33+
})
34+
35+
httpServer := &http.Server{
36+
Addr: "127.0.0.1:" + port,
37+
Handler: mux,
38+
}
39+
40+
if err := httpServer.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) {
41+
return err
42+
}
43+
44+
return nil
45+
}
46+
47+
// server holds the proxy configuration: the upstream Obot host that
// requests are forwarded to, and the local port this daemon listens on
// (reported back to the supervisor by healthz).
type server struct {
	obotHost, port string
}
50+
51+
func (s *server) healthz(w http.ResponseWriter, _ *http.Request) {
52+
_, _ = w.Write([]byte("http://127.0.0.1:" + s.port))
53+
}
54+
55+
func (s *server) rewriteModelsResponse(resp *http.Response) error {
56+
if resp.StatusCode != http.StatusOK {
57+
return nil
58+
}
59+
60+
originalBody := resp.Body
61+
defer originalBody.Close()
62+
63+
if resp.Header.Get("Content-Encoding") == "gzip" {
64+
var err error
65+
originalBody, err = gzip.NewReader(originalBody)
66+
if err != nil {
67+
return fmt.Errorf("failed to create gzip reader: %w", err)
68+
}
69+
defer originalBody.Close()
70+
resp.Header.Del("Content-Encoding")
71+
}
72+
73+
var models modelList
74+
if err := json.NewDecoder(originalBody).Decode(&models); err != nil {
75+
return fmt.Errorf("failed to decode models response: %w, %d, %v", err, resp.StatusCode, resp.Header)
76+
}
77+
78+
respModels := make([]openai.Model, 0, len(models.Items))
79+
var createdTimestamp int64
80+
for _, model := range models.Items {
81+
createdTimestamp = 0
82+
if created, ok := model["created"].(string); ok {
83+
if createdAt, err := time.Parse(time.RFC3339, created); err == nil {
84+
createdTimestamp = createdAt.Unix()
85+
}
86+
}
87+
respModels = append(respModels, openai.Model{
88+
CreatedAt: createdTimestamp,
89+
ID: model["id"].(string),
90+
Object: "model",
91+
Metadata: map[string]string{
92+
"usage": model["usage"].(string),
93+
},
94+
})
95+
}
96+
97+
b, err := json.Marshal(openai.ModelsList{Models: respModels})
98+
if err != nil {
99+
return fmt.Errorf("failed to marshal models response: %w", err)
100+
}
101+
102+
resp.Body = io.NopCloser(bytes.NewReader(b))
103+
resp.Header.Set("Content-Length", fmt.Sprintf("%d", len(b)))
104+
return nil
105+
}
106+
107+
func (s *server) proxy(prefix string) func(req *http.Request) {
108+
return func(req *http.Request) {
109+
req.URL.Host = s.obotHost
110+
req.URL.Scheme = "http"
111+
req.Host = req.URL.Host
112+
req.URL.Path = prefix + strings.TrimPrefix(req.URL.Path, "/v1")
113+
114+
if apiKey := getAPIKey(req); apiKey != "" {
115+
req.Header.Set("Authorization", "Bearer "+apiKey)
116+
}
117+
}
118+
}
119+
120+
func getAPIKey(req *http.Request) string {
121+
envHeader := req.Header.Get("X-GPTScript-Env")
122+
if envHeader == "" {
123+
return ""
124+
}
125+
126+
for _, env := range strings.Split(envHeader, ",") {
127+
if strings.HasPrefix(env, "GPTSCRIPT_MODEL_PROVIDER_TOKEN=") {
128+
return strings.TrimSpace(strings.TrimPrefix(env, "GPTSCRIPT_MODEL_PROVIDER_TOKEN="))
129+
}
130+
}
131+
132+
return ""
133+
}
134+
135+
// modelList mirrors the Obot API's model-listing payload, where each
// item is a loosely-typed model object decoded from JSON.
type modelList struct {
	Items []map[string]any `json:"items"`
}

obot-model-provider/tool.gpt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
Name: Odot
2-
Description: Model provider for Odot
1+
Name: Obot
2+
Description: Model provider for Obot
33
Model Provider: true
44

5-
#!sys.daemon /usr/bin/env python3 ${GPTSCRIPT_TOOL_DIR}/main.py
5+
#!sys.daemon ${GPTSCRIPT_TOOL_DIR}/bin/gptscript-go-tool

0 commit comments

Comments
 (0)