using GPTCodeQualitySharp.Dataset;
using GPTCodeQualitySharp.Document.Partial;
using GPTCodeQualitySharp.Evaluator.API.Impl;
using OpenAI_API;
using OpenAI_API.Chat;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace GPTCodeQualitySharp.Evaluator.API
{
    public class OpenAIChatEvaluator : ICodeChunkEvaluatorAsync
    {
        // Prompt template; must contain a {CODE} placeholder and may use {ROLE} separators.
        private string PromptTemplate { get; }

        public string ApiKey { get; set; }

        private readonly decimal _temperature;
        private readonly IValueStore _valueStore;

        public OpenAIChatEvaluator(string promptTemplate, IValueStore valueStore, string apiKey, decimal temperature = 0)
        {
            PromptTemplate = promptTemplate;
            _temperature = temperature;
            _valueStore = valueStore;

            if (!PromptTemplate.Contains("{CODE}"))
            {
                throw new ArgumentException("Prompt template must contain {CODE} placeholder");
            }

            // Fall back to the OPENAI_API_KEY environment variable, as promised by the error message below.
            apiKey ??= Environment.GetEnvironmentVariable("OPENAI_API_KEY");

            if (!string.IsNullOrWhiteSpace(apiKey))
            {
                ApiKey = apiKey;
            }
            else
            {
                throw new ArgumentException("No API key provided - please provide an API key or set the OPENAI_API_KEY environment variable");
            }
        }

        private string PreparePrompt(CodeChunk codeChunk)
        {
            return PromptTemplate.Replace("{CODE}", codeChunk.Code);
        }

        public async Task<EvaluatorResult> EvaluateAsync(CodeChunk codeChunk)
        {
            // TODO: Make this robust
            string prompt = PreparePrompt(codeChunk);

            // Split the prompt by {ROLE}: anything before the first {ROLE} is the system message,
            // then: {ROLE} this is user text {ROLE} this is assistant text {ROLE} this is user text, etc.
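            //
            // Illustrative (hypothetical) template, not taken from this repository:
            //   "You rate code quality and reply in JSON.{ROLE}Rate this code:\n{CODE}"
            // After Split("{ROLE}") the segments map to: [0] system, [1] user, [2] assistant, [3] user, ...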
            string[] promptSplit = prompt.Split("{ROLE}");

            // String.Split never returns an empty array, so check for the placeholder itself:
            // fewer than two segments means the template contains no {ROLE}, i.e. no chat messages.
            if (promptSplit.Length < 2)
            {
                throw new ArgumentException("Prompt template must contain a {ROLE} placeholder - no chat messages found in prompt");
            }

            var cacheKey = new HashableDataset(promptSplit);

            // Cache hit
            // TODO: Only consult the cache when temperature == 0 (deterministic requests)
            if (_valueStore.TryGetValue(ValueStoreTable.ApiResult, cacheKey, out string cacheResult))
            {
                return new EvaluatorResult(true, cacheResult);
            }

            // Create the chat
            OpenAIAPI api = new OpenAIAPI(new APIAuthentication(ApiKey));
            var chat = api.Chat.CreateConversation();

            // Add the chat messages: promptSplit[0] is the system message and the
            // remaining segments alternate between user and assistant messages.
            chat.AppendSystemMessage(promptSplit[0]);

            for (int i = 1; i < promptSplit.Length; i++)
            {
                if (i % 2 == 1)
                {
                    // User message
                    chat.AppendUserInput(promptSplit[i]);
                }
                else
                {
                    // Assistant message
                    chat.AppendExampleChatbotOutput(promptSplit[i]);
                }
            }

            chat.RequestParameters.Temperature = (double)_temperature;

            // Get the response
            var response = await chat.GetResponseFromChatbotAsync();

            if (response != null)
            {
                // Cache the raw response
                _valueStore.StoreValue(ValueStoreTable.OpenAIResponses, cacheKey, response);
            }

            // Extract the evaluation result from the model's response
            var evaluator = new OpenAIResultEvaluator(prompt, response);
            evaluator.TryGetJSONString(out EvaluatorResult result);
            return result;
        }
    }
}
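
// Usage sketch (illustrative only, not part of the original class): one way the evaluator
// above might be constructed and invoked. "InMemoryValueStore" and the CodeChunk constructor
// shown here are assumptions, not APIs confirmed by this file - substitute the project's
// actual IValueStore implementation and CodeChunk factory.
//
//   string template =
//       "You are a strict reviewer who rates code quality from 0 to 100 and replies in JSON." +
//       "{ROLE}Rate the following code:\n{CODE}";
//
//   var evaluator = new OpenAIChatEvaluator(
//       template,
//       new InMemoryValueStore(),                                  // assumed IValueStore implementation
//       Environment.GetEnvironmentVariable("OPENAI_API_KEY"),
//       temperature: 0);
//
//   EvaluatorResult result = await evaluator.EvaluateAsync(
//       new CodeChunk("public int Add(int a, int b) => a + b;"));  // assumed CodeChunk constructor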