From ccfeb663a855f7749096e9b62df34e71887ea2d5 Mon Sep 17 00:00:00 2001
From: oKatanaaa
Date: Thu, 5 Oct 2023 00:39:26 +0400
Subject: [PATCH] fix: dependencies and tokenizer

---
 lima_gui/state/settings.py | 2 +-
 requirements.txt           | 7 +++++--
 setup.py                   | 2 +-
 3 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/lima_gui/state/settings.py b/lima_gui/state/settings.py
index 43a838a..aa79c41 100644
--- a/lima_gui/state/settings.py
+++ b/lima_gui/state/settings.py
@@ -21,7 +21,7 @@ class Settings:
         'coding',
         'meta problems'
     ]
-    DEFAULT_TOKENIZER = 'meta-llama/Llama-2-7b-hf'
+    DEFAULT_TOKENIZER = 'OpenAssistant/llama2-13b-orca-8k-3319'
 
     @staticmethod
     def get_instance():
diff --git a/requirements.txt b/requirements.txt
index 234f320..af6cf8b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,5 @@
-pandas==1.5.3
-PySide6==6.5.1
\ No newline at end of file
+pandas~=1.5.3
+PySide6~=6.5.1
+openai
+transformers~=4.31
+tokenizers~=0.13.3
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 4f30184..e7ee06d 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@ setup(
     name='lima-gui',
-    version='0.4.1',
+    version='0.4.2',
     description='A simple GUI utility for gathering LIMA-like chat data.',
     long_description=long_description,
     long_description_content_type="text/markdown",