Commit a726f89

Merge pull request #12 from emilrueh/dev
Package Chat as feature resulting in better abstraction
2 parents: ff306f9 + 4800b95
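
In short: the standalone `Chat` module moves under `src/features/`, and the `AI` client gains a `chat()` factory so callers no longer wire the two together by hand. A before/after sketch distilled from the diffs below (assuming `api_key`, `endpoint`, `model`, `system_prompt`, and `settings` are defined as in `main.lua`):

```lua
-- before this commit: construct Chat manually around the client
local AI = require("src.ai")
local Chat = require("src.chat")
local ai = AI.new(api_key, endpoint)
local chat = Chat.new(ai, model, system_prompt, settings)

-- after this commit: Chat is packaged as a feature of the client
local AI = require("src.ai")
local client = AI.new(api_key, endpoint)
local chat = client:chat(model, { system_prompt = system_prompt, settings = settings })
```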

7 files changed: +81 -47 lines

README.md (+33 -11)

````diff
@@ -4,7 +4,7 @@ A developer-friendly Lua interface for working with multiple generative AI provi
 
 ## Providers
 
-> ⚠️ This is a work in progress so any help is appreciated!
+> ⚠️ This is a work in progress so any help is highly appreciated!
 
 - [OpenAI](https://platform.openai.com/docs/overview)
 
@@ -23,19 +23,41 @@ A developer-friendly Lua interface for working with multiple generative AI provi
 
 ```lua
 local AI = require("src.ai")
-local Chat = require("src.chat")
 
-local api_key = "<YOUR_API_KEY>"
-local endpoint = "https://api.openai.com/v1/chat/completions"
-local model = "gpt-4o-mini"
-local system_prompt = "You are Torben, the king of a nation."
-local settings = { stream = true }
+local client = AI.new("<YOUR_API_KEY>", "https://api.openai.com/v1/chat/completions")
+```
+
+### Minimal
+
+```lua
+local chat = client:chat("gpt-4o-mini")
+print(chat:say("Hello, world!"))
+```
 
-local ai = AI.new(api_key, endpoint)
-local chat = Chat.new(ai, model, system_prompt, settings)
+### Streaming
 
-local reply = chat:say("Give three short words of advice to the hero.")
-if not chat.settings.stream then print(reply) end
+```lua
+local chat = client:chat("gpt-4o-mini", { settings = { stream = true } })
+chat:say("Hello, world!")
+```
+
+### JSON
+
+```lua
+local npc_schema = {
+	name = { type = "string" },
+	class = { type = "string" },
+	level = { type = "integer" },
+}
+
+local json_object = {
+	title = "NPC",
+	description = "A non-player character's attributes.",
+	schema = npc_schema,
+}
+
+local chat = client:chat("gpt-4o-mini", { settings = { json = json_object } })
+print(chat:say("Create a powerful wizard called Torben."))
 ```
 
 See `main.lua` for a more detailed example.
````

main.lua (+9 -14)

```diff
@@ -1,6 +1,5 @@
 local config = require("src.config")
 local AI = require("src.ai")
-local Chat = require("src.chat")
 
 local api_keys = config.api_keys
 
@@ -20,25 +19,21 @@ local structured_response_obj = {
 -- local api_key = api_keys.anthropic_api_key
 -- local endpoint = "https://api.anthropic.com/v1/messages"
 -- local model = "claude-3-5-sonnet-20241022"
--- local settings = {
--- stream = false,
--- json = structured_response_obj,
--- }
 
 local api_key = api_keys.openai_api_key
 local endpoint = "https://api.openai.com/v1/chat/completions"
 local model = "gpt-4o-mini"
-local settings = {
-	stream = false,
-	json = structured_response_obj,
-}
-
-local system_prompt = "Respond extremely briefly."
-
-local ai = AI.new(api_key, endpoint)
-local chat = Chat.new(ai, model, system_prompt, settings)
 
 local function main()
+	local client = AI.new(api_key, endpoint)
+	local chat = client:chat(model, {
+		system_prompt = "Respond extremely briefly.",
+		settings = {
+			stream = false,
+			json = structured_response_obj,
+		},
+	})
+
 	while true do
 		local user_prompt = "You are King Torben giving advice."
 		print(user_prompt)
```

src/ai.lua (+13 -1)

```diff
@@ -1,6 +1,8 @@
 local config = require("src.config")
 local utils = require("src.utils")
-local providers = require("src.providers._load")
+local providers = require("src.providers")
+local features = require("src.features")
+
 local cjson = config.cjson
 
 ---Client for interacting with specified API endpoint
@@ -86,4 +88,14 @@ function AI:call(opts)
 	return reply, input_tokens, output_tokens
 end
 
+-- features:
+
+---Create chat instance with automatic tracking of messages and tokens
+---@param model string
+---@param opts table? Containing **settings** and/or **system_prompt**
+---@return Chat
+function AI:chat(model, opts)
+	return features.Chat.new(self, model, opts)
+end
+
 return AI
```
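
Taken together with the provider re-export, the new `AI:chat` keeps `features.Chat` an internal detail of the client. A minimal call-site sketch (key and endpoint are placeholders, as in the README):

```lua
local AI = require("src.ai")

local client = AI.new("<YOUR_API_KEY>", "https://api.openai.com/v1/chat/completions")
local chat = client:chat("gpt-4o-mini", { system_prompt = "Respond extremely briefly." })
print(chat:say("Hello, world!"))
```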

src/features.lua (new file, +6)

```diff
@@ -0,0 +1,6 @@
+---@module "src.features"
+local features = {}
+
+features.Chat = require("src.features.chat")
+
+return features
```
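
The aggregator mirrors the new `src/providers.lua`: one table to require instead of deep module paths. Going through it directly is equivalent to the `AI:chat` wrapper, e.g.:

```lua
local features = require("src.features")
-- same object AI:chat(model, opts) would return, given an existing client
local chat = features.Chat.new(client, "gpt-4o-mini")
```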

src/chat.lua → src/features/chat.lua (+13 -12)

```diff
@@ -1,7 +1,7 @@
 local utils = require("src.utils")
 
 ---@class Chat Accumulating chat history and usage
----@field _ai table
+---@field ai table
 ---@field model string
 ---@field settings table?
 ---@field usage table
@@ -12,20 +12,19 @@ Chat.__index = Chat
 
 ---@param ai table
 ---@param model string
----@param system_prompt string?
----@param settings table?
-function Chat.new(ai, model, system_prompt, settings)
+---@param opts table? Containing **settings** and/or **system_prompt**
+function Chat.new(ai, model, opts)
 	local self = setmetatable({}, Chat)
 
-	self._ai = ai
+	self.ai = ai
 	self.model = model
-	self.settings = settings or {}
+	self.settings = opts and opts.settings or {}
 	self.usage = { input = 0, output = 0 }
 	self.history = {}
-	self.system_prompt = system_prompt
+	self.system_prompt = opts and opts.system_prompt
 
 	-- insert system prompt into chat history at the start if provided
-	local system_message = self._ai.provider.construct_system_message(self.system_prompt)
+	local system_message = self.ai.provider.construct_system_message(self.system_prompt)
 	if system_message then -- some providers use system message as top-level arg
 		table.insert(self.history, system_message)
 	end
@@ -37,16 +36,18 @@ end
 ---@param user_prompt string
 ---@return string reply Full response text whether streamed or not
 function Chat:say(user_prompt)
-	table.insert(self.history, self._ai.provider.construct_user_message(user_prompt))
-	local reply, input_tokens, output_tokens = self._ai:call(self)
-	table.insert(self.history, self._ai.provider.construct_assistant_message(reply))
+	table.insert(self.history, self.ai.provider.construct_user_message(user_prompt))
+	local reply, input_tokens, output_tokens = self.ai:call(self)
+	table.insert(self.history, self.ai.provider.construct_assistant_message(reply))
 	self.usage.input = self.usage.input + input_tokens
 	self.usage.output = self.usage.output + output_tokens
 	return reply
 end
 
+---Calculate model pricing from input and output tokens in USD
+---@return number
 function Chat:get_cost()
-	return utils.calc_token_cost(self.model, self.usage, self._ai.provider.pricing)
+	return utils.calc_token_cost(self.model, self.usage, self.ai.provider.pricing)
 end
 
 return Chat
```
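
Because `Chat:say` accumulates `usage.input` and `usage.output` on every call, cost can be read off at any point via the newly documented `Chat:get_cost`. A short sketch, assuming a `client` built as in `main.lua`:

```lua
local chat = client:chat("gpt-4o-mini")
chat:say("Give three short words of advice to the hero.")

-- token counters accumulate across turns; cost is derived from provider pricing
print(("tokens: %d in / %d out"):format(chat.usage.input, chat.usage.output))
print("cost in USD: " .. chat:get_cost())
```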

src/providers.lua (new file, +7)

```diff
@@ -0,0 +1,7 @@
+---@module "src.providers"
+local providers = {}
+
+providers.openai = require("src.providers.openai")
+providers.anthropic = require("src.providers.anthropic")
+
+return providers
```
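
Each provider module is expected to expose the hooks `Chat` relies on (`construct_system_message`, `construct_user_message`, `construct_assistant_message`, and a `pricing` table). A hypothetical direct use of the registry:

```lua
local providers = require("src.providers")
-- message shape is provider-specific; Chat normally does this internally
local msg = providers.openai.construct_user_message("Hello, world!")
```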

src/providers/_load.lua (-9)

This file was deleted (replaced by `src/providers.lua`).
