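# NOTE: expects OPENAI_API_KEY (for the OpenAI LLM) and LLAMA_CLOUD_API_KEY (for LlamaParse) in a local .env file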
from dotenv import load_dotenv
load_dotenv()
from llama_index.core.agent import ReActAgent
from llama_index.llms.openai import OpenAI
from llama_index.core.tools import FunctionTool, QueryEngineTool
from llama_index.core import VectorStoreIndex, Settings
from llama_parse import LlamaParse

# settings
Settings.llm = OpenAI(model="gpt-3.5-turbo", temperature=0)

# function tools
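# FunctionTool.from_defaults picks up each tool's name and description from the
# function signature and docstring, which is what the agent uses to decide when to call it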
def multiply(a: float, b: float) -> float:
    """Multiply two numbers and return the product."""
    return a * b

multiply_tool = FunctionTool.from_defaults(fn=multiply)

def add(a: float, b: float) -> float:
    """Add two numbers and return the sum."""
    return a + b

add_tool = FunctionTool.from_defaults(fn=add)

# rag pipeline
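# LlamaParse (LlamaIndex's cloud parsing service) converts the PDF to markdown;
# the parsed documents are then embedded into an in-memory vector index and wrapped in a query engine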
documents = LlamaParse(result_type="markdown").load_data("./data/2023_canadian_budget.pdf")
index = VectorStoreIndex.from_documents(documents)
query_engine = index.as_query_engine()

budget_tool = QueryEngineTool.from_defaults(
    query_engine,
    name="canadian_budget_2023",
    description="A RAG engine with some basic facts about the 2023 Canadian federal budget. Ask natural-language questions about the budget.",
)

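# ReAct agent that interleaves reasoning steps with calls to the three tools;
# verbose=True prints the full reasoning trace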
agent = ReActAgent.from_tools([multiply_tool, add_tool, budget_tool], verbose=True)

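# factual budget questions: the agent is expected to route these to the canadian_budget_2023 tool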
response = agent.chat("How much exactly was allocated to a tax credit to promote investment in green technologies in the 2023 Canadian federal budget?")

print(response)

response = agent.chat("How much was allocated to implement a means-tested dental care program in the 2023 Canadian federal budget?")

print(response)

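# follow-up arithmetic over the two previous answers; the "Use a tool" hint nudges the agent
# to call the add tool rather than compute the sum itself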
response = agent.chat("How much was the total of those two allocations added together? Use a tool to answer any questions.")

print(response)