@@ -15,7 +15,7 @@
 from fastapi import FastAPI
 from nc_py_api import AsyncNextcloudApp, NextcloudApp, NextcloudException
 from nc_py_api.ex_app import LogLvl, persistent_storage, run_app, set_handlers
-from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider
+from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider, ShapeEnumValue
 
 models_to_fetch = {
     "https://huggingface.co/bartowski/Meta-Llama-3.1-8B-Instruct-GGUF/resolve/4f0c246f125fc7594238ebe7beb1435a8335f519/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf": { "save_path": os.path.join(persistent_storage(), "Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf") },
@@ -123,6 +123,15 @@ async def enabled_handler(enabled: bool, nc: AsyncNextcloudApp) -> str:
             name="Local Large language Model: " + model,
             task_type=task,
             expected_runtime=30,
+            input_shape_enum_values={
+                "tone": [
+                    ShapeEnumValue(name="Friendlier", value="friendlier"),
+                    ShapeEnumValue(name="More formal", value="more formal"),
+                    ShapeEnumValue(name="Funnier", value="funnier"),
+                    ShapeEnumValue(name="More casual", value="more casual"),
+                    ShapeEnumValue(name="More urgent", value="more urgent"),
+                ],
+            } if task == "core:text2text:changetone" else {}
         )
         await nc.providers.task_processing.register(provider)
         print(f"Registered {task_processor_name}", flush=True)
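
Note: the `ShapeEnumValue` entries registered above only declare which options Nextcloud offers for the `tone` input of the `core:text2text:changetone` task; the provider still has to read the selected value when it processes a task. Below is a minimal sketch of how that value might be folded into the model prompt. The `build_changetone_prompt` helper and the exact layout of the task input dict are assumptions for illustration, not part of this commit.

```python
def build_changetone_prompt(task_input: dict) -> str:
    """Hypothetical helper: fold the optional 'tone' enum value into a prompt.

    `task_input` is assumed to carry the shape fields of a
    core:text2text:changetone task, i.e. at least 'input' (the text to rewrite)
    and 'tone' (one of the values registered via ShapeEnumValue above).
    """
    # The enum *value* (e.g. "more formal") is what arrives in the task input,
    # not the display name ("More formal").
    tone = task_input.get("tone", "friendlier")
    text = task_input["input"]
    return (
        f"Rewrite the following text so that it sounds {tone}. "
        f"Keep the meaning unchanged.\n\n{text}"
    )
```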