diff --git a/src/modules/llms/openaiBot.ts b/src/modules/llms/openaiBot.ts
index 80b2a608..24c27ac7 100644
--- a/src/modules/llms/openaiBot.ts
+++ b/src/modules/llms/openaiBot.ts
@@ -123,7 +123,6 @@ export class OpenAIBot extends LlmsBase {
       ctx.hasCommand([
         SupportedCommands.chat,
         SupportedCommands.ask,
-        SupportedCommands.gpt4,
         SupportedCommands.gpt,
         SupportedCommands.gpto
       ]) ||
@@ -149,8 +148,8 @@ export class OpenAIBot extends LlmsBase {
       (ctx.message?.text?.startsWith('new ') && ctx.chat?.type === 'private') &&
         this.checkModel(ctx))
     ) {
       await this.onStop(ctx)
-      this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4)
-      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true, false)
+      this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4O)
+      await this.onChat(ctx, LlmsModelsEnum.GPT_4O, true, false)
       return
     }
@@ -160,6 +159,12 @@ export class OpenAIBot extends LlmsBase {
       return
     }
 
+    if (ctx.hasCommand(SupportedCommands.gpt4)) {
+      this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4)
+      await this.onChat(ctx, LlmsModelsEnum.GPT_4, true, false)
+      return
+    }
+
     if (ctx.hasCommand(SupportedCommands.ask32)) {
       this.updateSessionModel(ctx, LlmsModelsEnum.GPT_4_32K)
       await this.onChat(ctx, LlmsModelsEnum.GPT_4_32K, true, false)
diff --git a/src/modules/subagents/llamaSubagent.ts b/src/modules/subagents/llamaSubagent.ts
index 8846fe6d..28617980 100644
--- a/src/modules/subagents/llamaSubagent.ts
+++ b/src/modules/subagents/llamaSubagent.ts
@@ -65,7 +65,7 @@ export class LlamaAgent extends SubagentBase {
     await Promise.all(urls.map(async url => {
       let collection = ctx.session.collections.activeCollections.find(c => c.url === url)
       if (!collection) {
-        await this.addUrlToCollection(ctx, ctx.chat?.id, url, msg.content as string)
+        await this.addUrlToCollection(ctx, ctx.chat?.id ?? 0, url, msg.content as string)
         if (!ctx.session.collections.isProcessingQueue) {
           ctx.session.collections.isProcessingQueue = true
           await this.onCheckCollectionStatus(ctx)