chat.py
#!/usr/bin/env python3
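"""Interactive RAG chat over documents stored in a local DuckDB vector store.

Retrieval uses a multi-query retriever backed by an Ollama chat model; answers
are generated with a stuff-documents chain. Assumes an Ollama server is
reachable at localhost:11434 and that ./db/duck.db has already been populated
with embeddings (e.g. by a separate ingestion step).

Usage (model names shown are the script's defaults, override via environment):
    MODEL=llama3:8b EMBEDDINGS_MODEL_NAME=nomic-embed-text python3 chat.py
"""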
import logging
import os

import duckdb
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain.retrievers.multi_query import MultiQueryRetriever
from langchain_community.chat_models import ChatOllama
from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.vectorstores import DuckDB
from langchain_core.prompts import ChatPromptTemplate
# Read model names from environment variables (with defaults)
embeddings_model_name = os.environ.get('EMBEDDINGS_MODEL_NAME', 'nomic-embed-text')
model = os.environ.get("MODEL", "llama3:8b")
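
# Chat and embedding models, both served by a local Ollama instance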
ollama = ChatOllama(base_url='http://localhost:11434', model=model)
oembed = OllamaEmbeddings(base_url="http://localhost:11434", model=embeddings_model_name)

# Open the on-disk DuckDB database backing the vector store; the embeddings
# are expected to have been ingested beforehand
conn = duckdb.connect(database='./db/duck.db', config={})
vectorstore = DuckDB(connection=conn, embedding=oembed)

# Log the alternative queries generated by the multi-query retriever
logging.basicConfig()
logging.getLogger("langchain.retrievers.multi_query").setLevel(logging.INFO)

# Ask the LLM for several rephrasings of each question and merge the retrieved documents
retriever = MultiQueryRetriever.from_llm(retriever=vectorstore.as_retriever(), llm=ollama, include_original=True)

# Prompt adapted from https://smith.langchain.com/hub/rlm/rag-prompt
retrieval_qa_chat_prompt = ChatPromptTemplate.from_messages(
    [("system", """You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use three sentences maximum and keep the answer concise.
Question: {input}
Context: {context}
Answer:""")]
)
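
# Stuff the retrieved documents into {context} and wrap retrieval plus answer
# generation into a single chain; its output dict includes an 'answer' key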
combine_docs_chain = create_stuff_documents_chain(ollama, retrieval_qa_chat_prompt)
qachain = create_retrieval_chain(retriever, combine_docs_chain)

# Simple REPL: an empty line exits the program
while True:
    user_input = input("Enter a question: ")
    if not user_input:
        exit()
    print("\n")
    result = qachain.invoke({"input": user_input})
    # Print the question and the generated answer
    print("\n\n> Question:")
    print(user_input)
    print("\n\n> Answer:")
    print(result['answer'])
    print("\n\n")
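
# For reference, a minimal ingestion sketch (an assumption about how ./db/duck.db
# could be populated; not part of this script, shown only as commented-out code):
#
#   import duckdb
#   from langchain_community.embeddings import OllamaEmbeddings
#   from langchain_community.vectorstores import DuckDB
#
#   conn = duckdb.connect(database='./db/duck.db')
#   store = DuckDB(connection=conn, embedding=OllamaEmbeddings(model='nomic-embed-text'))
#   store.add_texts(["first document text", "second document text"])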