Skip to content

Commit dd2f0d1

Browse files
authored
Merge pull request #14 from emilrueh/dev
Set up LuaRocks structure and expose stream handler
2 parents c7c30fc + 097c833 commit dd2f0d1

15 files changed

+248
-201
lines changed

README.md

+29-13
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
# Unified Lua Interface for Generative AI
1+
# Generative AI SDK for Lua
22

3-
A developer-friendly Lua interface for working with multiple generative AI providers, abstracting away provider-specific payload structures and response parsing so you can easily switch between various models and providers without rewriting any code.
3+
A developer-friendly Lua interface for working with various generative AI providers, abstracting away provider-specific payload structures and response parsing so that using multiple models is easy.
44

55
## Providers
66

@@ -15,33 +15,49 @@ A developer-friendly Lua interface for working with multiple generative AI provi
1515
- Easily switch between AI chat model providers
1616
- Pass in prompts and get replies without the provider complexity
1717
- Easily integrate new models and adjust settings
18-
- Work directly with the `src.ai` client for more granular control
18+
- Use the `chat` object for integrated message handling
19+
- Use the `genai` client directly for more granular control if needed
1920
- Abstraction for structured response JSON output
2021
- Token usage tracking with cost calculation
2122

23+
## Installation
24+
25+
```
26+
luarocks install lua-genai
27+
```
28+
2229
## Usage
2330

2431
```lua
25-
local AI = require("src.ai")
32+
local genai = require("genai")
33+
34+
local client = genai.new("<YOUR_API_KEY>", "https://api.openai.com/v1/chat/completions")
2635

27-
local client = AI.new("<YOUR_API_KEY>", "https://api.openai.com/v1/chat/completions")
36+
local chat = client:chat("gpt-4o-mini")
37+
print(chat:say("Hello, world!"))
2838
```
2939

30-
### Minimal
40+
### System Prompt
3141

3242
```lua
33-
local chat = client:chat("gpt-4o-mini")
34-
print(chat:say("Hello, world!"))
43+
local chat = client:chat("gpt-4o-mini", { system_prompt = "You are a fish." })
44+
print(chat:say("What are you?"))
3545
```
3646

3747
### Streaming
3848

3949
```lua
40-
local chat = client:chat("gpt-4o-mini", { settings = { stream = true } })
41-
chat:say("Hello, world!")
50+
local process_stream = function(text)
51+
io.write(text)
52+
io.flush()
53+
end
54+
55+
local chat = client:chat("gpt-4o-mini", { settings = { stream = process_stream } })
56+
chat:say("Tell me a very short story.")
57+
print()
4258
```
4359

44-
### JSON
60+
### JSON Response
4561

4662
```lua
4763
local npc_schema = {
@@ -60,9 +76,9 @@ local chat = client:chat("gpt-4o-mini", { settings = { json = json_object } })
6076
print(chat:say("Create a powerful wizard called Torben."))
6177
```
6278

63-
See `main.lua` for a more detailed example.
79+
See `example.lua` for a full-featured Anthropic implementation.
6480

65-
### Dependencies
81+
## Dependencies
6682

6783
- [lua-cjson](https://github.com/openresty/lua-cjson)
6884

example.lua

+50
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
local genai = require("genai")
2+
3+
local api_key = "<YOUR_API_KEY>"
4+
local endpoint = "https://api.anthropic.com/v1/messages"
5+
local model = "claude-3-5-sonnet-20241022"
6+
7+
local client = genai.new(api_key, endpoint)
8+
9+
local response_schema = {
10+
name = {
11+
type = "string",
12+
},
13+
response = {
14+
type = "string",
15+
},
16+
}
17+
18+
local chat = client:chat(model, {
19+
system_prompt = "Respond extremely briefly.",
20+
settings = {
21+
json = {
22+
title = "NPC",
23+
description = "Response schema of NPCs.",
24+
schema = response_schema,
25+
},
26+
stream = function(text)
27+
io.write(text)
28+
io.flush()
29+
end,
30+
},
31+
})
32+
33+
while true do
34+
local user_prompt = "You are King Torben giving advice."
35+
print(user_prompt)
36+
print()
37+
38+
local reply = chat:say(user_prompt) -- API call
39+
40+
if not chat.settings.stream then
41+
print(reply)
42+
else
43+
print()
44+
end
45+
print()
46+
break
47+
end
48+
49+
local usd_token_cost = chat:get_cost()
50+
print(usd_token_cost .. "usd")

main.lua

-59
This file was deleted.

rockspecs/lua-genai-0.1-1.rockspec

+35
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
package = "lua-genai"
2+
version = "0.1-1"
3+
4+
source = {
5+
url = "https://github.com/emilrueh/lua-genai.git",
6+
tag = "v0.1",
7+
}
8+
9+
description = {
10+
summary = "Generative AI SDK",
11+
detailed = "Interface for generative AI providers like OpenAI, Anthropic, Google Gemini, etc. abstracting away provider-specific payload structures and response parsing to simplify switching models.",
12+
homepage = "https://github.com/emilrueh/lua-genai",
13+
license = "Zlib",
14+
}
15+
16+
dependencies = {
17+
"lua >= 5.1",
18+
"lua-cjson",
19+
"luasec",
20+
}
21+
22+
build = {
23+
type = "builtin",
24+
-- copy_directories = { "docs" },
25+
modules = {
26+
["genai"] = "src/genai/init.lua",
27+
["genai.genai"] = "src/genai/genai.lua",
28+
["genai.utils"] = "src/genai/utils.lua",
29+
["genai.features"] = "src/genai/features/init.lua",
30+
["genai.features.chat"] = "src/genai/features/chat.lua",
31+
["genai.providers"] = "src/genai/providers/init.lua",
32+
["genai.providers.anthropic"] = "src/genai/providers/anthropic.lua",
33+
["genai.providers.openai"] = "src/genai/providers/openai.lua",
34+
},
35+
}

src/features.lua

-6
This file was deleted.
File renamed without changes.
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
1-
local utils = require("src.utils")
1+
local utils = require("genai.utils")
22

33
---@class Chat Accumulating chat history and usage
4-
---@field ai table
4+
---@field client table
55
---@field model string
66
---@field settings table?
77
---@field usage table
@@ -10,21 +10,21 @@ local utils = require("src.utils")
1010
local Chat = {}
1111
Chat.__index = Chat
1212

13-
---@param ai table
13+
---@param client table
1414
---@param model string
1515
---@param opts table? Containing **settings** and or **system_prompt**
16-
function Chat.new(ai, model, opts)
16+
function Chat.new(client, model, opts)
1717
local self = setmetatable({}, Chat)
1818

19-
self.ai = ai
19+
self.client = client
2020
self.model = model
2121
self.settings = opts and opts.settings or {}
2222
self.usage = { input = 0, output = 0 }
2323
self.history = {}
2424
self.system_prompt = opts and opts.system_prompt
2525

2626
-- insert system prompt into chat history at the start if provided
27-
local system_message = self.ai.provider.construct_system_message(self.system_prompt)
27+
local system_message = self.client.provider.construct_system_message(self.system_prompt)
2828
if system_message then -- some providers use system message as top-level arg
2929
table.insert(self.history, system_message)
3030
end
@@ -36,9 +36,9 @@ end
3636
---@param user_prompt string
3737
---@return string reply Full response text whether streamed or not
3838
function Chat:say(user_prompt)
39-
table.insert(self.history, self.ai.provider.construct_user_message(user_prompt))
40-
local reply, input_tokens, output_tokens = self.ai:call(self)
41-
table.insert(self.history, self.ai.provider.construct_assistant_message(reply))
39+
table.insert(self.history, self.client.provider.construct_user_message(user_prompt))
40+
local reply, input_tokens, output_tokens = self.client:call(self)
41+
table.insert(self.history, self.client.provider.construct_assistant_message(reply))
4242
self.usage.input = self.usage.input + input_tokens
4343
self.usage.output = self.usage.output + output_tokens
4444
return reply
@@ -47,7 +47,7 @@ end
4747
---Calculate model pricing from input and output tokens in USD
4848
---@return number
4949
function Chat:get_cost()
50-
return utils.calc_token_cost(self.model, self.usage, self.ai.provider.pricing)
50+
return utils.calc_token_cost(self.model, self.usage, self.client.provider.pricing)
5151
end
5252

5353
return Chat

src/genai/features/init.lua

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
---@module "genai.features"
2+
local features = {}
3+
4+
features.Chat = require("genai.features.chat")
5+
6+
return features

0 commit comments

Comments (0)