-
Notifications
You must be signed in to change notification settings - Fork 1.4k
Expand file tree
/
Copy pathtypes.ts
More file actions
110 lines (101 loc) · 2.56 KB
/
types.ts
File metadata and controls
110 lines (101 loc) · 2.56 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
/**
* Core types for LLM integration
*/
import type * as z from 'zod/v4'
/**
 * Message format - OpenAI standard (industry standard)
 */
export interface Message {
  /** Who produced the message; 'tool' carries a tool-execution result back to the model. */
  role: 'system' | 'user' | 'assistant' | 'tool'
  /** Text content. NOTE(review): presumably null/omitted when the assistant message only carries tool_calls — confirm against the client implementation. */
  content?: string | null
  /** Tool invocations requested by the assistant (OpenAI tool-calling shape). */
  tool_calls?: {
    /** Provider-assigned call id; echoed back via tool_call_id on the 'tool' reply. */
    id: string
    type: 'function'
    function: {
      name: string
      arguments: string // JSON string
    }
  }[]
  /** For role 'tool': id of the tool_calls entry this message responds to. */
  tool_call_id?: string
  /** Optional participant/tool name. */
  name?: string
}
/**
 * Tool definition - uses Zod schema (LLM-agnostic)
 * Supports generics for type-safe parameters and return values
 *
 * NOTE(review): the `any` defaults for TParams/TResult disable checking at
 * un-parameterized use sites; `unknown` would be stricter but is a breaking
 * change for existing callers — leaving as-is.
 */
export interface Tool<TParams = any, TResult = any> {
  // name: string — intentionally absent; the key in the tools record passed to
  // LLMClient.invoke (Record<string, Tool>) serves as the tool's name.
  /** Human-readable description of the tool, surfaced to the LLM. */
  description?: string
  /** Zod schema that types (and can validate) the tool's arguments. */
  inputSchema: z.ZodType<TParams>
  /** Executes the tool with parsed args and resolves to its result. */
  execute: (args: TParams) => Promise<TResult>
}
/**
 * Invoke options for LLM call
 */
export interface InvokeOptions {
  /**
   * Force LLM to call a specific tool by name.
   * If provided: tool_choice = { type: 'function', function: { name: toolChoiceName } }
   * If not provided: tool_choice = 'required' (must call some tool, but model chooses which)
   */
  toolChoiceName?: string
  /**
   * Response normalization function.
   * Called before parsing the response.
   * Used to fix various response format errors from the model.
   * Receives the raw model response and returns the corrected form.
   */
  normalizeResponse?: (response: any) => any
}
/**
 * LLM Client interface
 * Note: Does not use generics because each tool in the tools array has different types
 */
export interface LLMClient {
  /**
   * Sends the conversation plus the available tools to the model and
   * resolves with the resulting tool call, tool result, and token usage.
   *
   * @param messages    Conversation history in OpenAI message format.
   * @param tools       Available tools, keyed by tool name.
   * @param abortSignal Optional signal to cancel the in-flight request.
   * @param options     Per-call options (forced tool choice, response normalization).
   */
  invoke(
    messages: Message[],
    tools: Record<string, Tool>,
    abortSignal?: AbortSignal,
    options?: InvokeOptions
  ): Promise<InvokeResult>
}
/**
 * Invoke result (strict typing, supports generics)
 */
export interface InvokeResult<TResult = unknown> {
  /** The tool call the model chose to make. */
  toolCall: {
    // id?: string // OpenAI's tool_call_id
    /** Name of the called tool (a key of the tools record passed to invoke). */
    name: string
    /** Parsed tool arguments. NOTE(review): typed `any` — presumably the JSON-decoded `function.arguments`; narrowing to `unknown` would break consumers. */
    args: any
  }
  toolResult: TResult // Supports generics, but defaults to unknown
  /** Token accounting for this call. */
  usage: {
    promptTokens: number
    completionTokens: number
    totalTokens: number
    cachedTokens?: number // Prompt cache hits
    reasoningTokens?: number // OpenAI o1 series reasoning tokens
  }
  rawResponse?: unknown // Raw response for debugging
  rawRequest?: unknown // Raw request for debugging
}
/**
 * LLM configuration
 */
export interface LLMConfig {
  /** API endpoint base URL (OpenAI-compatible). */
  baseURL: string
  /** Model identifier to request. */
  model: string
  /** API key; optional for endpoints that do not require auth. */
  apiKey?: string
  /** Sampling temperature forwarded to the model. */
  temperature?: number
  /** Maximum number of retry attempts on failure. */
  maxRetries?: number
  /**
   * remove the tool_choice field from the request.
   * @note fix "Invalid tool_choice type: 'object'" for some LLMs.
   */
  disableNamedToolChoice?: boolean
  /**
   * Custom fetch function for LLM API requests.
   * Use this to customize headers, credentials, proxy, etc.
   * The response should follow OpenAI API format.
   */
  customFetch?: typeof globalThis.fetch
}