Merging work from various projects
johnrizzo1 committed Dec 13, 2024
1 parent c72129c commit 2efe755
Showing 16 changed files with 4,808 additions and 5 deletions.
6 changes: 3 additions & 3 deletions ada/modules/ada.py
@@ -1,6 +1,6 @@
from ada.modules.logging import logger, log_info, log_warning, log_error, log_tool_call, log_ws_event, log_runtime
from ada.modules.async_microphone import AsyncMicrophone
-from ada.modules.tools import function_map, tools
+from ada.modules.tools import tool_map, tools

import os
import sys
@@ -185,9 +185,9 @@ async def handle_function_call(self, event, websocket):
await self.execute_function_call(function_name, call_id, args, websocket)

async def execute_function_call(self, function_name, call_id, args, websocket):
-if function_name in function_map:
+if function_name in tool_map:
try:
-result = await function_map[function_name](**args)
+result = await tool_map[function_name](**args)
log_tool_call(function_name, args, result)
except Exception as e:
error_message = f"Error executing function '{function_name}': {str(e)}"
4 changes: 2 additions & 2 deletions ada/modules/tools.py
@@ -72,15 +72,15 @@ async def open_browser():


# Map function names to their corresponding functions
-function_map = {
+tool_map = {
"get_current_time": get_current_time,
"open_browser": open_browser,
"start_program": start_program,
}

# Tools array for session initialization
tools = [
{
{
"type": "function",
"name": "get_current_time",
"description": "Returns the current time.",
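For context, a minimal sketch of how the renamed tool_map registry is consumed by the dispatcher in ada.py (per the hunk above). The get_current_time body below is a stand-in for illustration, and the real execute_function_call also logs the call and reports errors back over the websocket, which is omitted here.

import asyncio
from datetime import datetime

# Stand-in tool; the project's actual implementation lives in ada/modules/tools.py.
async def get_current_time() -> str:
    return datetime.now().isoformat()

# Same shape as the renamed registry: tool name -> async callable.
tool_map = {
    "get_current_time": get_current_time,
}

async def execute_tool(function_name: str, args: dict) -> str:
    # Mirrors execute_function_call: known names are awaited with the decoded args.
    if function_name in tool_map:
        return await tool_map[function_name](**args)
    return f"Unknown tool: {function_name}"

print(asyncio.run(execute_tool("get_current_time", {})))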
327 changes: 327 additions & 0 deletions research/ada_research.ipynb
@@ -0,0 +1,327 @@
{
"cells": [
{
"cell_type": "code",
"metadata": {
"ExecuteTime": {
"end_time": "2024-12-01T22:16:58.010394Z",
"start_time": "2024-12-01T22:16:56.971463Z"
}
},
"source": [
"from langchain_community.document_loaders import WebBaseLoader\n",
"from langchain_core.messages import AIMessage, BaseMessage, HumanMessage\n",
"from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
"from langchain_core.vectorstores import InMemoryVectorStore\n",
"from langchain_openai import ChatOpenAI, OpenAIEmbeddings\n",
"from langchain_text_splitters import RecursiveCharacterTextSplitter\n",
"from langchain.chains import create_history_aware_retriever, create_retrieval_chain\n",
"from langchain.chains.combine_documents import create_stuff_documents_chain\n",
"from langchain.tools.retriever import create_retriever_tool\n",
"from langgraph.checkpoint.memory import MemorySaver\n",
"from langgraph.graph import START, StateGraph\n",
"from langgraph.graph.message import add_messages\n",
"from langgraph.prebuilt import create_react_agent\n",
"from typing import Sequence\n",
"from typing_extensions import Annotated, TypedDict\n",
"# import bs3\n",
"import bs4\n",
"from dotenv import load_dotenv"
],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"USER_AGENT environment variable not set, consider setting it to identify your requests.\n"
]
}
],
"execution_count": 1
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:21.105936Z",
"start_time": "2024-11-25T14:26:21.077429Z"
}
},
"outputs": [],
"source": [
"load_dotenv()\n",
"\n",
"# Create the LLM\n",
"llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:23.789087Z",
"start_time": "2024-11-25T14:26:21.194275Z"
}
},
"outputs": [],
"source": [
"### Construct retriever ###\n",
"loader = WebBaseLoader(\n",
" web_paths=(\"https://lilianweng.github.io/posts/2023-06-23-agent/\",),\n",
" bs_kwargs=dict(\n",
" parse_only=bs4.SoupStrainer(\n",
" class_=(\"post-content\", \"post-title\", \"post-header\")\n",
" )\n",
" ),\n",
")\n",
"docs = loader.load()\n",
"\n",
"text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)\n",
"splits = text_splitter.split_documents(docs)\n",
"vectorstore = InMemoryVectorStore.from_documents(\n",
" documents=splits, embedding=OpenAIEmbeddings()\n",
")\n",
"retriever = vectorstore.as_retriever()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:23.798581Z",
"start_time": "2024-11-25T14:26:23.793658Z"
}
},
"outputs": [],
"source": [
"### Contextualize question ###\n",
"contextualize_q_system_prompt = (\n",
" \"Given a chat history and the latest user question \"\n",
" \"which might reference context in the chat history, \"\n",
" \"formulate a standalone question which can be understood \"\n",
" \"without the chat history. Do NOT answer the question, \"\n",
" \"just reformulate it if needed and otherwise return it as is.\"\n",
")\n",
"contextualize_q_prompt = ChatPromptTemplate.from_messages(\n",
" [\n",
" (\"system\", contextualize_q_system_prompt),\n",
" MessagesPlaceholder(\"chat_history\"),\n",
" (\"human\", \"{input}\"),\n",
" ]\n",
")\n",
"history_aware_retriever = create_history_aware_retriever(\n",
" llm, retriever, contextualize_q_prompt\n",
")\n",
"\n",
"\n",
"### Answer question ###\n",
"system_prompt = (\n",
" \"You are an assistant for question-answering tasks. \"\n",
" \"Use the following pieces of retrieved context to answer \"\n",
" \"the question. If you don't know the answer, say that you \"\n",
" \"don't know. Use three sentences maximum and keep the \"\n",
" \"answer concise.\"\n",
" \"\\n\\n\"\n",
" \"{context}\"\n",
")\n",
"qa_prompt = ChatPromptTemplate.from_messages(\n",
" [\n",
" (\"system\", system_prompt),\n",
" MessagesPlaceholder(\"chat_history\"),\n",
" (\"human\", \"{input}\"),\n",
" ]\n",
")\n",
"question_answer_chain = create_stuff_documents_chain(llm, qa_prompt)\n",
"\n",
"rag_chain = create_retrieval_chain(history_aware_retriever, question_answer_chain)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:23.857963Z",
"start_time": "2024-11-25T14:26:23.854315Z"
}
},
"outputs": [],
"source": [
"### Statefully manage chat history ###\n",
"class State(TypedDict):\n",
" input: str\n",
" chat_history: Annotated[Sequence[BaseMessage], add_messages]\n",
" context: str\n",
" answer: str\n",
"\n",
"\n",
"def call_model(state: State):\n",
" response = rag_chain.invoke(state)\n",
" return {\n",
" \"chat_history\": [\n",
" HumanMessage(state[\"input\"]),\n",
" AIMessage(response[\"answer\"]),\n",
" ],\n",
" \"context\": response[\"context\"],\n",
" \"answer\": response[\"answer\"],\n",
" }\n",
"\n",
"\n",
"workflow = StateGraph(state_schema=State)\n",
"workflow.add_edge(START, \"model\")\n",
"workflow.add_node(\"model\", call_model)\n",
"\n",
"memory = MemorySaver()\n",
"app = workflow.compile(checkpointer=memory)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:25.645801Z",
"start_time": "2024-11-25T14:26:23.912690Z"
}
},
"outputs": [],
"source": [
"config = {\"configurable\": {\"thread_id\": \"abc123\"}}\n",
"\n",
"result = app.invoke(\n",
" {\"input\": \"What is Task Decomposition? Explain why you think your answer is correct.\"},\n",
" config=config,\n",
")\n",
"print(result[\"answer\"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:28.013347Z",
"start_time": "2024-11-25T14:26:25.652388Z"
}
},
"outputs": [],
"source": [
"result = app.invoke(\n",
" {\"input\": \"What is one way of doing it?\"},\n",
" config=config,\n",
")\n",
"print(result[\"answer\"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:28.060282Z",
"start_time": "2024-11-25T14:26:28.025528Z"
}
},
"outputs": [],
"source": [
"### Build retriever tool ###\n",
"tool = create_retriever_tool(\n",
" retriever,\n",
" \"blog_post_retriever\",\n",
" \"Searches and returns excerpts from the Autonomous Agents blog post.\",\n",
")\n",
"tools = [tool]\n",
"\n",
"\n",
"agent_executor = create_react_agent(llm, tools, checkpointer=memory)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:28.516905Z",
"start_time": "2024-11-25T14:26:28.063034Z"
}
},
"outputs": [],
"source": [
"config = {\"configurable\": {\"thread_id\": \"abc123\"}}\n",
"\n",
"for event in agent_executor.stream(\n",
" {\"messages\": [HumanMessage(content=\"Hi! I'm bob\")]},\n",
" config=config,\n",
" stream_mode=\"values\",\n",
"):\n",
" event[\"messages\"][-1].pretty_print()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:33.938879Z",
"start_time": "2024-11-25T14:26:28.523887Z"
}
},
"outputs": [],
"source": [
"query = \"What is Task Decomposition?\"\n",
"\n",
"for event in agent_executor.stream(\n",
" {\"messages\": [HumanMessage(content=query)]},\n",
" config=config,\n",
" stream_mode=\"values\",\n",
"):\n",
" event[\"messages\"][-1].pretty_print()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2024-11-25T14:26:37.288568Z",
"start_time": "2024-11-25T14:26:33.947002Z"
}
},
"outputs": [],
"source": [
"query = \"What according to the blog post are common ways of doing it? redo the search\"\n",
"\n",
"for event in agent_executor.stream(\n",
" {\"messages\": [HumanMessage(content=query)]},\n",
" config=config,\n",
" stream_mode=\"values\",\n",
"):\n",
" event[\"messages\"][-1].pretty_print()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.8"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
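A short usage note on the checkpointing used in the notebook above: because the graph is compiled with MemorySaver, chat history is keyed by the thread_id in the invocation config, which is why the follow-up "What is one way of doing it?" can resolve "it" against the earlier Task Decomposition turn. A minimal sketch, assuming the app compiled in the notebook and a hypothetical second thread id:

# Reuses the `app` compiled above; "xyz789" is an assumed id for illustration.
same_thread = {"configurable": {"thread_id": "abc123"}}
fresh_thread = {"configurable": {"thread_id": "xyz789"}}

# Shares checkpointed history with the earlier turns on thread abc123.
print(app.invoke({"input": "What is one way of doing it?"}, config=same_thread)["answer"])

# A new thread starts with empty history, so "it" has no prior context to resolve.
print(app.invoke({"input": "What is one way of doing it?"}, config=fresh_thread)["answer"])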