diff --git a/lima_gui/state/settings.py b/lima_gui/state/settings.py
index 43a838a..aa79c41 100644
--- a/lima_gui/state/settings.py
+++ b/lima_gui/state/settings.py
@@ -21,7 +21,7 @@ class Settings:
         'coding',
         'meta problems'
     ]
-    DEFAULT_TOKENIZER = 'meta-llama/Llama-2-7b-hf'
+    DEFAULT_TOKENIZER = 'OpenAssistant/llama2-13b-orca-8k-3319'
 
     @staticmethod
     def get_instance():
diff --git a/requirements.txt b/requirements.txt
index 234f320..af6cf8b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,5 @@
-pandas==1.5.3
-PySide6==6.5.1
\ No newline at end of file
+pandas~=1.5.3
+PySide6~=6.5.1
+openai
+transformers~=4.31
+tokenizers~=0.13.3
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 4f30184..e7ee06d 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@
 
 setup(
     name='lima-gui',
-    version='0.4.1',
+    version='0.4.2',
     description='A simple GUI utility for gathering LIMA-like chat data.',
     long_description=long_description,
     long_description_content_type="text/markdown",