@@ -11,7 +11,7 @@ use crate::{
 };
 use actix_web_lab::sse::Sender;
 pub use data::*;
-use openai_api_rs::v1::chat_completion::FinishReason;
+use openai_api_rs::v1::chat_completion::{FinishReason, FunctionCallType};
 use openai_api_rs::v1::{
     api::Client,
     chat_completion::{
@@ -93,7 +93,7 @@ impl<D: RepositoryEmbeddingsDB, M: EmbeddingsModel> Conversation<D, M> {
         #[allow(unused_labels)]
         'conversation: loop {
             //Generate a request with the message history and functions
-            let request = generate_completion_request(self.messages.clone(), "auto");
+            let request = generate_completion_request(self.messages.clone(), FunctionCallType::Auto);

             match self.send_request(request) {
                 Ok(response) => {
@@ -199,7 +199,7 @@ impl<D: RepositoryEmbeddingsDB, M: EmbeddingsModel> Conversation<D, M> {
                     //Generate a request with the message history and no functions
                     let request = generate_completion_request(
                         self.messages.clone(),
-                        "none",
+                        FunctionCallType::None,
                     );
                     emit(&self.sender, QueryEvent::GenerateResponse(None))
                         .await;
@@ -261,7 +261,7 @@ fn sanitize_query(query: &str) -> Result<String> {
         content: sanitize_query_prompt(query),
     };
     let client = Client::new(env::var("OPENAI_API_KEY")?);
-    let request = generate_completion_request(vec![message], "none");
+    let request = generate_completion_request(vec![message], FunctionCallType::None);
     let response = client.chat_completion(request)?;
     if let FinishReason::stop = response.choices[0].finish_reason {
         let sanitized_query = response.choices[0]
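Aside (not part of the diff): the change swaps the stringly-typed `function_call` argument for the `FunctionCallType` enum imported from `openai_api_rs`. A minimal sketch of the mapping follows, assuming only the `Auto` and `None` variants used in the hunks above; the helper function and its panic branch are illustrative and do not exist in this PR.

use openai_api_rs::v1::chat_completion::FunctionCallType;

// Sketch only: shows how the old "auto"/"none" string values correspond to
// the FunctionCallType enum variants used in the updated call sites.
fn to_function_call_type(value: &str) -> FunctionCallType {
    match value {
        "auto" => FunctionCallType::Auto,
        "none" => FunctionCallType::None,
        other => panic!("unsupported function_call value: {other}"),
    }
}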