From 2486c0bf9588af0291eb1430d1eeca5e563e36ca Mon Sep 17 00:00:00 2001
From: Jana Peper
Date: Tue, 7 Jan 2025 13:56:16 +0100
Subject: [PATCH] feat: add changetone provider

Signed-off-by: Jana Peper

---
 lib/change_tone.py     | 40 ++++++++++++++++++++++++++++++++++++++++
 lib/main.py            | 11 ++++++++++-
 lib/task_processors.py |  2 ++
 3 files changed, 52 insertions(+), 1 deletion(-)
 create mode 100644 lib/change_tone.py

diff --git a/lib/change_tone.py b/lib/change_tone.py
new file mode 100644
index 0000000..2ff9c50
--- /dev/null
+++ b/lib/change_tone.py
@@ -0,0 +1,40 @@
+# SPDX-FileCopyrightText: 2024 Nextcloud GmbH and Nextcloud contributors
+# SPDX-License-Identifier: AGPL-3.0-or-later
+"""A chain that changes the tone of a text
+"""
+
+from typing import Any
+
+from langchain.prompts import PromptTemplate
+from langchain.schema.prompt_template import BasePromptTemplate
+from langchain_core.runnables import Runnable
+
+class ChangeToneProcessor:
+    """
+    A chain that reformulates a text in a requested tone
+    """
+
+    runnable: Runnable
+
+    system_prompt: str = "You're an AI assistant tasked with reformulating the text given to you by the user in a requested tone."
+    user_prompt: BasePromptTemplate = PromptTemplate(
+        input_variables=["text", "tone"],
+        template="""
+        Reformulate the following text in a " {tone} " tone in its original language. Output only the reformulation. Here is the text:
+
+        "
+        {text}
+        "
+
+        Output only the reformulated text, nothing else, no introductory sentence. Use the same language as the original text.
+        """
+    )
+
+    def __init__(self, runnable: Runnable):
+        self.runnable = runnable
+
+    def __call__(self, inputs: dict[str, Any]) -> dict[str, Any]:
+        # Render the user prompt with the requested tone and pass both prompts to the model
+        output = self.runnable.invoke({"user_prompt": self.user_prompt.format_prompt(text=inputs["input"], tone=inputs["tone"]),
+                                       "system_prompt": self.system_prompt})
+        return {"output": output}
\ No newline at end of file
diff --git a/lib/main.py b/lib/main.py
index a9b19f6..2e32dce 100644
--- a/lib/main.py
+++ b/lib/main.py
@@ -15,7 +15,7 @@ from fastapi import FastAPI
 from nc_py_api import AsyncNextcloudApp, NextcloudApp, NextcloudException
 from nc_py_api.ex_app import LogLvl, persistent_storage, run_app, set_handlers
-from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider
+from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider, ShapeEnumValue
 
 models_to_fetch = {
     "https://huggingface.co/bartowski/Meta-Llama-3.1-8B-Instruct-GGUF/resolve/4f0c246f125fc7594238ebe7beb1435a8335f519/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf": { "save_path": os.path.join(persistent_storage(), "Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf") },
 
@@ -123,6 +123,15 @@ async def enabled_handler(enabled: bool, nc: AsyncNextcloudApp) -> str:
             name="Local Large language Model: " + model,
             task_type=task,
             expected_runtime=30,
+            input_shape_enum_values={
+                "tone": [
+                    ShapeEnumValue(name="Friendlier", value="friendlier"),
+                    ShapeEnumValue(name="More formal", value="more formal"),
+                    ShapeEnumValue(name="Funnier", value="funnier"),
+                    ShapeEnumValue(name="More casual", value="more casual"),
+                    ShapeEnumValue(name="More urgent", value="more urgent"),
+                ],
+            } if task == "core:text2text:changetone" else {}
         )
         await nc.providers.task_processing.register(provider)
         print(f"Registered {task_processor_name}", flush=True)
diff --git a/lib/task_processors.py b/lib/task_processors.py
index d84c09b..701baf1 100644
--- a/lib/task_processors.py
+++ b/lib/task_processors.py
@@ -17,6 +17,7 @@ from free_prompt import FreePromptProcessor
 from headline import HeadlineProcessor
 from proofread import ProofreadProcessor
+from change_tone import ChangeToneProcessor
 from chatwithtools import ChatWithToolsProcessor
 from topics import TopicsProcessor
 from summarize import SummarizeProcessor
 
@@ -134,6 +135,7 @@ def generate_task_processors_for_model(file_name, task_processors):
     task_processors[model_name + ":core:text2text"] = lambda: FreePromptProcessor(generate_llm_chain(file_name))
     task_processors[model_name + ":core:text2text:chat"] = lambda: ChatProcessor(generate_chat_chain(file_name))
     task_processors[model_name + ":core:text2text:proofread"] = lambda: ProofreadProcessor(generate_llm_chain(file_name))
+    task_processors[model_name + ":core:text2text:changetone"] = lambda: ChangeToneProcessor(generate_llm_chain(file_name))
     task_processors[model_name + ":core:text2text:chatwithtools"] = lambda: ChatWithToolsProcessor(generate_chat_chain(file_name))
 
     # chains[model_name + ":core:contextwrite"] = lambda: ContextWriteChain(llm_chain=llm_chain())
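
For reference, a minimal sketch of how the new processor behaves once wired up. The stub runnable and the sample inputs below are illustrative stand-ins, not part of the patch; in the app the runnable comes from generate_llm_chain(file_name):

# Illustrative only: exercise ChangeToneProcessor with a stub Runnable
# standing in for the LLM chain returned by generate_llm_chain(file_name).
from langchain_core.runnables import RunnableLambda

from change_tone import ChangeToneProcessor

# The stub returns the rendered user prompt so the flow is visible end to end.
stub = RunnableLambda(lambda prompts: prompts["user_prompt"].to_string())

processor = ChangeToneProcessor(stub)
result = processor({"input": "See you at the meeting.", "tone": "more formal"})
print(result["output"])  # prints the prompt the real model would receive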