Skip to content

Commit

Permalink
fix whitelist issue with private chats + update payment process for chat completion and dalle
Browse files Browse the repository at this point in the history
  • Loading branch information
fegloff committed Jan 23, 2024
1 parent 314e514 commit 408f39d
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 31 deletions.
2 changes: 1 addition & 1 deletion src/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ export default {
// ? parseInt(process.env.WORD_COUNT_BETWEEN)
// : 10,
priceAdjustment: process.env.PRICE_ADJUSTMENT
? parseInt(process.env.PRICE_ADJUSTMENT)
? parseFloat(process.env.PRICE_ADJUSTMENT)
: 2,
isFreePromptChatGroups: false,
isEnabled: Boolean(parseInt(process.env.CHAT_GPT_ENABLED ?? '1')),
Expand Down
6 changes: 3 additions & 3 deletions src/modules/open-ai/api/openAi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -281,11 +281,11 @@ export const getChatModelPrice = (
model: ChatModel,
inCents = true,
inputTokens: number,
outPutTokens?: number
outputTokens?: number
): number => {
let price = model.inputPrice * inputTokens
price += outPutTokens
? outPutTokens * model.outputPrice
price += outputTokens
? outputTokens * model.outputPrice
: model.maxContextTokens * model.outputPrice
price = inCents ? price * 100 : price
return price / 1000
Expand Down
11 changes: 6 additions & 5 deletions src/modules/open-ai/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -238,23 +238,24 @@ export const hasPrefix = (prompt: string): string => {
)
}

export const getPromptPrice = (completion: string, data: ChatPayload): { price: number, promptTokens: number, completionTokens: number } => {
export const getPromptPrice = (completion: string, data: ChatPayload): { price: number, promptTokens: number, completionTokens: number, totalTokens: number } => {
const { conversation, ctx, model } = data

const currentUsage = data.prompt ? 0 : ctx.session.openAi.chatGpt.usage
const prompt = data.prompt ? data.prompt : conversation[conversation.length - 1].content
const promptTokens = getTokenNumber(prompt as string)
const promptTokens = getTokenNumber(prompt as string) + currentUsage
const completionTokens = getTokenNumber(completion)
const modelPrice = getChatModel(model)
const price =
getChatModelPrice(modelPrice, true, promptTokens, completionTokens) *
config.openAi.chatGpt.priceAdjustment
conversation.push({ content: completion, role: 'system' })
ctx.session.openAi.chatGpt.usage += promptTokens + completionTokens
ctx.session.openAi.chatGpt.usage += completionTokens
ctx.session.openAi.chatGpt.price += price
return {
price,
promptTokens,
completionTokens
completionTokens,
totalTokens: data.prompt ? promptTokens + completionTokens : ctx.session.openAi.chatGpt.usage
}
}

Expand Down
6 changes: 2 additions & 4 deletions src/modules/open-ai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ import { Callbacks } from '../types'
import { LlmsBot } from '../llms'
import { type PhotoSize } from 'grammy/types'

const priceAdjustment = config.openAi.chatGpt.priceAdjustment
export class OpenAIBot implements PayableBot {
public readonly module = 'OpenAIBot'
private readonly logger: Logger
Expand Down Expand Up @@ -90,7 +91,6 @@ export class OpenAIBot implements PayableBot {

public getEstimatedPrice (ctx: any): number {
try {
const priceAdjustment = config.openAi.chatGpt.priceAdjustment
const prompts = ctx.match
if (this.isSupportedImageReply(ctx) && !isNaN(+prompts)) {
const imageNumber = ctx.message?.caption || ctx.message?.text
Expand Down Expand Up @@ -378,9 +378,7 @@ export class OpenAIBot implements PayableBot {
ctx.transient.analytics.actualResponseTime = now()
const price = getPromptPrice(completion, data)
this.logger.info(
`streamChatCompletion result = tokens: ${
price.promptTokens + price.completionTokens
} | ${model} | price: ${price.price}¢`
        `streamChatCompletion result = tokens: ${price.totalTokens} | ${model} | price: ${price.price}¢` // previously: price.promptTokens + price.completionTokens
)
return {
price: price.price,
Expand Down
31 changes: 16 additions & 15 deletions src/modules/open-ai/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,60 +22,61 @@ export enum ChatGPTModelsEnum {
export const ChatGPTModels: Record<string, ChatModel> = {
'gpt-4': {
name: 'gpt-4',
inputPrice: 0.03,
outputPrice: 0.06,
inputPrice: 0.03, // 3
outputPrice: 0.06, // 6
maxContextTokens: 8192,
chargeType: 'TOKEN'
},
'gpt-4-32k': {
name: 'gpt-4-32k',
inputPrice: 0.06,
outputPrice: 0.12,
inputPrice: 0.06, // 6
outputPrice: 0.12, // 12
maxContextTokens: 32000,
chargeType: 'TOKEN'
},
'gpt-3.5-turbo': {
name: 'gpt-3.5-turbo',
inputPrice: 0.0015,
outputPrice: 0.002,
inputPrice: 0.0015, // 0.15
outputPrice: 0.002, // 0.2
maxContextTokens: 4000,
chargeType: 'TOKEN'
},
'gpt-3.5-turbo-16k': {
name: 'gpt-3.5-turbo-16k',
inputPrice: 0.003,
outputPrice: 0.004,
inputPrice: 0.003, // 0.3
outputPrice: 0.004, // 0.4
maxContextTokens: 16000,
chargeType: 'TOKEN'
},
'gpt-4-vision-preview': {
name: 'gpt-4-vision-preview',
inputPrice: 0.03,
outputPrice: 0.06,
inputPrice: 0.03, // 3
outputPrice: 0.06, // 6
maxContextTokens: 16000,
chargeType: 'TOKEN'
}
}

// needs to be in cents
export const DalleGPTModels: Record<string, DalleGPTModel> = {
'1024x1792': {
size: '1024x1792',
price: 0.10
price: 12 // 0.12
},
'1792x1024': {
size: '1792x1024',
price: 0.10
price: 12 // 0.12
},
'1024x1024': {
size: '1024x1024',
price: 0.10
price: 8 // 0.08
},
'512x512': {
size: '512x512',
price: 0.10
    price: 8 // 0.08
},
'256x256': {
size: '256x256',
price: 0.10
    price: 8 // 0.08
}
}
4 changes: 1 addition & 3 deletions src/modules/payment/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -291,10 +291,8 @@ export class BotPayments {
return true
}
}
return false
} else {
return true
}
return false
}

public isPaymentsEnabled (): boolean {
Expand Down

0 comments on commit 408f39d

Please sign in to comment.