Skip to content

Commit

Permalink
feat: add changetone provider
Browse files Browse the repository at this point in the history
Signed-off-by: Jana Peper <[email protected]>
  • Loading branch information
janepie committed Jan 7, 2025
1 parent 7a94d7d commit 2486c0b
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 1 deletion.
40 changes: 40 additions & 0 deletions lib/change_tone.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# SPDX-FileCopyrightText: 2024 Nextcloud GmbH and Nextcloud contributors
# SPDX-License-Identifier: AGPL-3.0-or-later
"""A chain that changes the tone of a text
"""

from typing import Any

from langchain.prompts import PromptTemplate
from langchain.schema.prompt_template import BasePromptTemplate
from langchain_core.runnables import Runnable

class ChangeToneProcessor():
    """A chain that reformulates a text in a requested tone.

    Wraps a Runnable (an LLM chain) and invokes it with a fixed system
    prompt plus a user prompt built from the input text and target tone.
    """

    # The underlying LLM chain that is invoked with the prompts.
    runnable: Runnable

    # Fixed system prompt describing the task. NOTE: the original value was
    # copy-pasted from the topics processor ("finding the topic keywords");
    # corrected here to match this provider's actual purpose.
    system_prompt: str = "You're an AI assistant tasked with reformulating the text given to you by the user in a requested tone."

    # Template filled with the user's text and the desired tone.
    user_prompt: BasePromptTemplate = PromptTemplate(
        input_variables=["text", "tone"],
        template="""
        Reformulate the following text in a " {tone} " tone in its original language. Output only the reformulation. Here is the text:
        "
        {text}
        "
        Output only the reformulated text, nothing else, no introductory sentence. Use the same language as the original text.
        """
    )

    def __init__(self, runnable: Runnable):
        """Store the LLM chain used to perform the reformulation."""
        self.runnable = runnable

    def __call__(self, inputs: dict[str, Any]) -> dict[str, Any]:
        """Reformulate ``inputs['input']`` in the tone ``inputs['tone']``.

        :param inputs: dict with keys ``input`` (the text) and ``tone``
            (the desired tone, e.g. "friendlier"); a missing key raises
            ``KeyError``.
        :return: dict with a single ``output`` key holding the result.
        """
        output = self.runnable.invoke({
            "user_prompt": self.user_prompt.format_prompt(
                text=inputs["input"], tone=inputs["tone"]
            ),
            "system_prompt": self.system_prompt,
        })
        return {"output": output}
11 changes: 10 additions & 1 deletion lib/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from fastapi import FastAPI
from nc_py_api import AsyncNextcloudApp, NextcloudApp, NextcloudException
from nc_py_api.ex_app import LogLvl, persistent_storage, run_app, set_handlers
from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider
from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider, ShapeEnumValue

models_to_fetch = {
"https://huggingface.co/bartowski/Meta-Llama-3.1-8B-Instruct-GGUF/resolve/4f0c246f125fc7594238ebe7beb1435a8335f519/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf": { "save_path": os.path.join(persistent_storage(), "Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf") },
Expand Down Expand Up @@ -123,6 +123,15 @@ async def enabled_handler(enabled: bool, nc: AsyncNextcloudApp) -> str:
name="Local Large language Model: " + model,
task_type=task,
expected_runtime=30,
input_shape_enum_values= {
"tone": [
ShapeEnumValue(name= "Friendlier", value= "friendlier"),
ShapeEnumValue(name= "More formal", value= "more formal"),
ShapeEnumValue(name= "Funnier", value= "funnier"),
ShapeEnumValue(name= "More casual", value= "more casual"),
ShapeEnumValue(name= "More urgent", value= "more urgent"),
],
} if task == "core:text2text:changetone" else {}
)
await nc.providers.task_processing.register(provider)
print(f"Registered {task_processor_name}", flush=True)
Expand Down
2 changes: 2 additions & 0 deletions lib/task_processors.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from free_prompt import FreePromptProcessor
from headline import HeadlineProcessor
from proofread import ProofreadProcessor
from change_tone import ChangeToneProcessor
from chatwithtools import ChatWithToolsProcessor
from topics import TopicsProcessor
from summarize import SummarizeProcessor
Expand Down Expand Up @@ -134,6 +135,7 @@ def generate_task_processors_for_model(file_name, task_processors):
task_processors[model_name + ":core:text2text"] = lambda: FreePromptProcessor(generate_llm_chain(file_name))
task_processors[model_name + ":core:text2text:chat"] = lambda: ChatProcessor(generate_chat_chain(file_name))
task_processors[model_name + ":core:text2text:proofread"] = lambda: ProofreadProcessor(generate_llm_chain(file_name))
task_processors[model_name + ":core:text2text:changetone"] = lambda: ChangeToneProcessor(generate_llm_chain(file_name))
task_processors[model_name + ":core:text2text:chatwithtools"] = lambda: ChatWithToolsProcessor(generate_chat_chain(file_name))

# chains[model_name + ":core:contextwrite"] = lambda: ContextWriteChain(llm_chain=llm_chain())

0 comments on commit 2486c0b

Please sign in to comment.