@@ -18,8 +18,8 @@ running `npm install`.
18
18
19
19
## Usage
20
20
21
- To use the module and run gptscripts, you need to first set the OPENAI_API_KEY environment variable to your OpenAI API
22
- key.
21
+ To use the module and run gptscripts, you need to first set the `OPENAI_API_KEY` environment variable to your OpenAI API
22
+ key. You can also set the `GPTSCRIPT_BIN` environment variable to change how the gptscripts are executed.
23
23
24
24
To ensure it is working properly, you can run the following command:
25
25
@@ -31,11 +31,10 @@ You will see "Hello, World!" in the output of the command.
31
31
32
32
## Client
33
33
34
- There are currently a couple "global" options, and the client helps to manage those. A client without any options is
35
- likely what you want. However, here are the current global options:
36
-
37
- - `gptscriptURL`: The URL (including `http(s)://`) of an "SDK server" to use instead of the fork/exec model.
38
- - `gptscriptBin`: The path to a `gptscript` binary to use instead of the bundled one.
34
+ The client allows the caller to run gptscript files, tools, and other operations (see below). There are currently no
35
+ options for this singleton client, so `await gptscript.Client.init()` is all you need. Although the intention is that a
36
+ single client is all you need for the life of your application, you should call `close()` on the client when you are
37
+ done.
39
38
40
39
## Options
41
40
@@ -45,7 +44,6 @@ None of the options is required, and the defaults will reduce the number of call
45
44
- `disableCache`: Enable or disable caching (default: true)
46
45
- `cacheDir`: Specify the cache directory
47
46
- `quiet`: No output logging
48
- - `chdir`: Change the current working directory
49
47
- `subTool`: Use tool of this name, not the first tool
50
48
- `workspace`: Directory to use for the workspace; if specified, it will not be deleted on exit
51
49
@@ -61,9 +59,10 @@ Lists all the available built-in tools.
61
59
const gptscript = require (' @gptscript-ai/gptscript' );
62
60
63
61
async function listTools () {
64
- const client = new gptscript.Client ();
62
+ const client = await gptscript.Client.init();
65
63
const tools = await client .listTools ();
66
64
console .log (tools);
65
+ client.close()
67
66
}
68
67
```
69
68
@@ -78,12 +77,13 @@ const gptscript = require('@gptscript-ai/gptscript');
78
77
79
78
async function listModels () {
80
79
let models = [];
80
+ const client = await gptscript.Client.init();
81
81
try {
82
- const client = new gptscript.Client ();
83
82
models = await client .listModels ();
84
83
} catch (error) {
85
84
console .error (error);
86
85
}
86
+ client.close()
87
87
}
88
88
```
89
89
@@ -97,12 +97,13 @@ Get the first of the current `gptscript` binary being used for the calls.
97
97
const gptscript = require (' @gptscript-ai/gptscript' );
98
98
99
99
async function version () {
100
+ const client = await gptscript.Client.init();
100
101
try {
101
- const client = new gptscript.Client ();
102
102
console .log (await client .version ());
103
103
} catch (error) {
104
104
console .error (error);
105
105
}
106
+ client.close()
106
107
}
107
108
```
108
109
@@ -118,13 +119,14 @@ const t = {
118
119
instructions: " Who was the president of the united states in 1928?"
119
120
};
120
121
122
+ const client = await gptscript.Client.init();
121
123
try {
122
- const client = new gptscript.Client ();
123
124
const run = client .evaluate (t);
124
125
console .log (await run .text ());
125
126
} catch (error) {
126
127
console .error (error);
127
128
}
129
+ client.close();
128
130
```
129
131
130
132
### run
@@ -140,13 +142,14 @@ const opts = {
140
142
};
141
143
142
144
async function execFile () {
145
+ const client = await gptscript.Client.init();
143
146
try {
144
- const client = new gptscript.Client ();
145
147
const run = client .run (' ./hello.gpt' , opts);
146
148
console .log (await run .text ());
147
149
} catch (e) {
148
150
console .error (e);
149
151
}
152
+ client.close();
150
153
}
151
154
```
152
155
@@ -178,8 +181,8 @@ const opts = {
178
181
};
179
182
180
183
async function streamExecFileWithEvents () {
184
+ const client = await gptscript.Client.init();
181
185
try {
182
- const client = new gptscript.Client ();
183
186
const run = client .run (' ./test.gpt' , opts);
184
187
185
188
run .on (gptscript .RunEventType .Event , data => {
@@ -190,6 +193,7 @@ async function streamExecFileWithEvents() {
190
193
} catch (e) {
191
194
console .error (e);
192
195
}
196
+ client.close();
193
197
}
194
198
```
195
199
@@ -218,7 +222,7 @@ const t = {
218
222
};
219
223
220
224
async function streamExecFileWithEvents () {
221
- const client = new gptscript.Client ();
225
+ const client = await gptscript.Client.init();
222
226
let run = client .evaluate (t, opts);
223
227
try {
224
228
// Wait for the initial run to complete.
@@ -238,6 +242,7 @@ async function streamExecFileWithEvents() {
238
242
console .error (e);
239
243
}
240
244
245
+ client.close();
241
246
242
247
// The state here should either be RunState.Finished (on success) or RunState.Error (on error).
243
248
console .log (run .state )
0 commit comments