Skip to content

Commit

Permalink
Merge pull request #42 from nextcloud/feat/add-new-providers
Browse files Browse the repository at this point in the history
Feat: add new providers
  • Loading branch information
marcelklehr authored Jan 20, 2025
2 parents 3caabc4 + 0886de3 commit ff1ebde
Show file tree
Hide file tree
Showing 4 changed files with 94 additions and 1 deletion.
39 changes: 39 additions & 0 deletions lib/change_tone.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# SPDX-FileCopyrightText: 2024 Nextcloud GmbH and Nextcloud contributors
# SPDX-License-Identifier: AGPL-3.0-or-later
"""A chain that changes the tone of a text
"""

from typing import Any

from langchain.prompts import PromptTemplate
from langchain.schema.prompt_template import BasePromptTemplate
from langchain_core.runnables import Runnable

class ChangeToneProcessor():
    """
    A chain that rewrites a text in a given tone.

    Wraps an LLM runnable: formats the user prompt with the input text and
    the requested tone, then invokes the runnable with that prompt plus a
    fixed system prompt.
    """

    # The underlying LLM runnable that executes the prompt pair.
    runnable: Runnable

    # Fixed system prompt establishing the rewriting task.
    system_prompt: str = "You're an AI assistant tasked with rewriting the text given to you by the user in another tone."

    # Template rendered per request with the text to rewrite and the target tone.
    user_prompt: BasePromptTemplate = PromptTemplate(
        input_variables=["text", "tone"],
        template="""Reformulate the following text in a " {tone} " tone in its original language without mentioning the language. Output only the reformulation, nothing else, no introductory sentence. Here is the text:
"
{text}
"
Output only the reformulated text, nothing else. Do not add an introductory sentence.
"""
    )

    def __init__(self, runnable: Runnable):
        """Store the runnable used to execute the prompts."""
        self.runnable = runnable

    def __call__(self, inputs: dict[str, Any]) -> dict[str, Any]:
        """
        Rewrite ``inputs['input']`` in the tone named by ``inputs['tone']``.

        :param inputs: dict with keys ``input`` (text) and ``tone`` (tone name)
        :return: dict with key ``output`` holding the runnable's result
        """
        output = self.runnable.invoke({
            "user_prompt": self.user_prompt.format_prompt(text=inputs['input'], tone=inputs['tone']),
            "system_prompt": self.system_prompt,
        })
        return {'output': output}
11 changes: 10 additions & 1 deletion lib/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from fastapi import FastAPI
from nc_py_api import AsyncNextcloudApp, NextcloudApp, NextcloudException
from nc_py_api.ex_app import LogLvl, persistent_storage, run_app, set_handlers
from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider
from nc_py_api.ex_app.providers.task_processing import TaskProcessingProvider, ShapeEnumValue

models_to_fetch = {
"https://huggingface.co/bartowski/Meta-Llama-3.1-8B-Instruct-GGUF/resolve/4f0c246f125fc7594238ebe7beb1435a8335f519/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf": { "save_path": os.path.join(persistent_storage(), "Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf") },
Expand Down Expand Up @@ -123,6 +123,15 @@ async def enabled_handler(enabled: bool, nc: AsyncNextcloudApp) -> str:
name="Local Large language Model: " + model,
task_type=task,
expected_runtime=30,
input_shape_enum_values= {
"tone": [
ShapeEnumValue(name= "Friendlier", value= "friendlier"),
ShapeEnumValue(name= "More formal", value= "more formal"),
ShapeEnumValue(name= "Funnier", value= "funnier"),
ShapeEnumValue(name= "More casual", value= "more casual"),
ShapeEnumValue(name= "More urgent", value= "more urgent"),
],
} if task == "core:text2text:changetone" else {}
)
await nc.providers.task_processing.register(provider)
print(f"Registered {task_processor_name}", flush=True)
Expand Down
40 changes: 40 additions & 0 deletions lib/proofread.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# SPDX-FileCopyrightText: 2024 Nextcloud GmbH and Nextcloud contributors
# SPDX-License-Identifier: AGPL-3.0-or-later
"""A chain to proofread a text
"""

from typing import Any
from langchain.prompts import PromptTemplate
from langchain.schema.prompt_template import BasePromptTemplate
from langchain_core.runnables import Runnable


class ProofreadProcessor:
    """
    A proofreading chain

    Formats the proofreading prompt with the caller-supplied text and invokes
    the wrapped LLM runnable with it alongside a fixed system prompt.
    """

    # LLM runnable used to execute the prompt pair.
    runnable: Runnable

    # Fixed system prompt establishing the proofreading task.
    system_prompt: str = "You're an AI assistant tasked with proofreading the text given to you by the user."

    # Template rendered per request with the text to check.
    user_prompt: BasePromptTemplate = PromptTemplate(
        input_variables=["text"],
        template="""
Detect all grammar and spelling mistakes of the following text in its original language. Output only the list of mistakes in bullet points.
"
{text}
"
Give me the list of all mistakes in the above text in its original language. Do not output the language. Output only the list in bullet points, nothing else, no introductory or explanatory text.
"""
    )

    def __init__(self, runnable: Runnable):
        """Store the runnable used to execute the prompts."""
        self.runnable = runnable

    def __call__(self, inputs: dict[str, Any]) -> dict[str, Any]:
        """
        Proofread ``inputs['input']`` and return the list of mistakes.

        :param inputs: dict with key ``input`` holding the text to check
        :return: dict with key ``output`` holding the runnable's result
        """
        rendered_prompt = self.user_prompt.format_prompt(text=inputs['input'])
        result = self.runnable.invoke({
            "user_prompt": rendered_prompt,
            "system_prompt": self.system_prompt,
        })
        return {'output': result}
5 changes: 5 additions & 0 deletions lib/task_processors.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
from chat import ChatProcessor
from free_prompt import FreePromptProcessor
from headline import HeadlineProcessor
from proofread import ProofreadProcessor
from change_tone import ChangeToneProcessor
from chatwithtools import ChatWithToolsProcessor
from topics import TopicsProcessor
from summarize import SummarizeProcessor
Expand Down Expand Up @@ -132,5 +134,8 @@ def generate_task_processors_for_model(file_name, task_processors):
# chains[model_name + ":core:text2text:reformulation"] = lambda: ReformulateChain(llm_chain=llm_chain(), chunk_size=chunk_size)
task_processors[model_name + ":core:text2text"] = lambda: FreePromptProcessor(generate_llm_chain(file_name))
task_processors[model_name + ":core:text2text:chat"] = lambda: ChatProcessor(generate_chat_chain(file_name))
task_processors[model_name + ":core:text2text:proofread"] = lambda: ProofreadProcessor(generate_llm_chain(file_name))
task_processors[model_name + ":core:text2text:changetone"] = lambda: ChangeToneProcessor(generate_llm_chain(file_name))
task_processors[model_name + ":core:text2text:chatwithtools"] = lambda: ChatWithToolsProcessor(generate_chat_chain(file_name))

# chains[model_name + ":core:contextwrite"] = lambda: ContextWriteChain(llm_chain=llm_chain())

0 comments on commit ff1ebde

Please sign in to comment.