diff --git a/core/cat/looking_glass/stray_cat.py b/core/cat/looking_glass/stray_cat.py
index a065e655..e61feb37 100644
--- a/core/cat/looking_glass/stray_cat.py
+++ b/core/cat/looking_glass/stray_cat.py
@@ -6,7 +6,7 @@
 from typing import Literal, get_args, List, Dict, Union, Any
 
 from langchain.docstore.document import Document
-from langchain_core.messages import SystemMessage, BaseMessage
+from langchain_core.messages import BaseMessage, HumanMessage
 from langchain_core.runnables import RunnableConfig, RunnableLambda
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.output_parsers.string import StrOutputParser
@@ -322,8 +322,8 @@ def llm(self, prompt: str, stream: bool = False) -> str:
         # here we deal with motherfucking langchain
         prompt = ChatPromptTemplate(
             messages=[
-                SystemMessage(content=prompt)
-                # TODO: add here optional convo history passed to the method,
+                HumanMessage(content=prompt) # We decided to use HumanMessage for wide-range compatibility even if it could bring some problems with tokenizers
+                # TODO: add here optional convo history passed to the method,
                 # or taken from working memory
             ]
         )
diff --git a/core/pyproject.toml b/core/pyproject.toml
index b0aa7f56..940eefa2 100644
--- a/core/pyproject.toml
+++ b/core/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "Cheshire-Cat"
 description = "Production ready AI assistant framework"
-version = "1.8.0"
+version = "1.8.1"
 requires-python = ">=3.10"
 license = { file = "LICENSE" }
 authors = [