Skip to content

Commit

Permalink
Edit ollama code
Browse files Browse the repository at this point in the history
  • Loading branch information
tharun571 committed Sep 27, 2024
1 parent 3d06eb5 commit 7e1afe5
Showing 1 changed file with 99 additions and 5 deletions.
104 changes: 99 additions & 5 deletions src/xmagics/xassist.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,43 @@ namespace xcpp
}
};

/// Persists per-model endpoint URLs as plain-text files named
/// "<model>_url.txt" in the current working directory.
class url_manager
{
public:

    /// Save `url` as the endpoint for `model`, overwriting any previous value.
    /// Reports success on stdout and failure on stderr.
    static void save_url(const std::string& model, const std::string& url)
    {
        const std::string path = model + "_url.txt";
        std::ofstream file(path);
        if (!file)
        {
            std::cerr << "Failed to open file for writing URL for model " << model << std::endl;
            return;
        }
        file << url;
        file.close();
        std::cout << "URL saved for model " << model << std::endl;
    }

    /// Load the previously saved endpoint URL for `model`.
    /// Returns an empty string (and logs to stderr) if no URL was saved.
    static std::string load_url(const std::string& model)
    {
        const std::string path = model + "_url.txt";
        std::ifstream file(path);
        if (!file)
        {
            std::cerr << "Failed to open file for reading URL for model " << model << std::endl;
            return "";
        }
        // Only the first line is meaningful; the file holds a single URL.
        std::string stored_url;
        std::getline(file, stored_url);
        return stored_url;
    }
};

class chat_history
{
public:
Expand Down Expand Up @@ -247,11 +284,13 @@ namespace xcpp
curl_helper curl_helper;
const std::string chat_message = xcpp::chat_history::chat("gemini", "user", cell);
const std::string model = xcpp::model_manager::load_model("gemini");

if (model.empty())
{
std::cerr << "Model not found." << std::endl;
return "";
}

const std::string url = "https://generativelanguage.googleapis.com/v1beta/models/"
+ model
+ ":generateContent?key="
Expand All @@ -276,17 +315,58 @@ namespace xcpp
return j["candidates"][0]["content"]["parts"][0]["text"];
}

/// Send `cell` as a user message to a locally configured ollama server and
/// return the assistant's reply text (empty string on any failure).
/// Requires both a model (%%xassist ollama --save-model) and a URL
/// (%%xassist ollama --set-url) to have been saved beforehand.
std::string ollama(const std::string& cell)
{
    curl_helper curl_helper;
    const std::string url = xcpp::url_manager::load_url("ollama");

    // Bail out before touching the chat history if no endpoint is configured;
    // the original code would issue a request against an empty URL.
    if (url.empty())
    {
        std::cerr << "URL not found. Set it with: %%xassist ollama --set-url <url>" << std::endl;
        return "";
    }

    const std::string chat_message = xcpp::chat_history::chat("ollama", "user", cell);
    const std::string model = xcpp::model_manager::load_model("ollama");

    if (model.empty())
    {
        std::cerr << "Model not found." << std::endl;
        return "";
    }

    const std::string post_data = R"({
        "model": ")" + model + R"(",
        "messages": [)" + chat_message
                                  + R"(],
        "stream": false
    })";

    std::string response = curl_helper.perform_request(url, post_data);

    json j = json::parse(response);

    if (j.find("error") != j.end())
    {
        std::cerr << "Error: " << j["error"]["message"] << std::endl;
        return "";
    }

    // Extract the reply once, record it in the history, then return it.
    const std::string assistant_reply = j["message"]["content"];
    xcpp::chat_history::chat("ollama", "assistant", assistant_reply);

    return assistant_reply;
}

std::string openai(const std::string& cell, const std::string& key)
{
curl_helper curl_helper;
const std::string url = "https://api.openai.com/v1/chat/completions";
const std::string chat_message = xcpp::chat_history::chat("openai", "user", cell);
const std::string model = xcpp::model_manager::load_model("openai");

if (model.empty())
{
std::cerr << "Model not found." << std::endl;
return "";
}

const std::string post_data = R"({
"model": [)" + model + R"(],
"messages": [)" + chat_message
Expand Down Expand Up @@ -324,7 +404,7 @@ namespace xcpp
std::istream_iterator<std::string>()
);

std::vector<std::string> models = {"gemini", "openai"};
std::vector<std::string> models = {"gemini", "openai", "ollama"};
std::string model = tokens[1];

if (std::find(models.begin(), models.end(), model) == models.end())
Expand Down Expand Up @@ -352,13 +432,23 @@ namespace xcpp
xcpp::model_manager::save_model(model, cell);
return;
}

if(tokens[2] == "--set-url" && model == "ollama")
{
xcpp::url_manager::save_url(model, cell);
return;
}
}

std::string key = xcpp::api_key_manager::load_api_key(model);
if (key.empty())
std::string key;
if(model != "ollama")
{
std::cerr << "API key for model " << model << " is not available." << std::endl;
return;
key = xcpp::api_key_manager::load_api_key(model);
if (key.empty())
{
std::cerr << "API key for model " << model << " is not available." << std::endl;
return;
}
}

std::string response;
Expand All @@ -370,6 +460,10 @@ namespace xcpp
{
response = openai(cell, key);
}
else if (model == "ollama")
{
response = ollama(cell);
}

std::cout << response;
}
Expand Down

0 comments on commit 7e1afe5

Please sign in to comment.