diff --git a/examples/model_providers/custom_example_agent.py b/examples/model_providers/custom_example_agent.py index d7519a52..f10865c4 100644 --- a/examples/model_providers/custom_example_agent.py +++ b/examples/model_providers/custom_example_agent.py @@ -3,7 +3,7 @@ from openai import AsyncOpenAI -from agents import Agent, OpenAIChatCompletionsModel, Runner, set_tracing_disabled +from agents import Agent, OpenAIChatCompletionsModel, Runner, function_tool, set_tracing_disabled BASE_URL = os.getenv("EXAMPLE_BASE_URL") or "" API_KEY = os.getenv("EXAMPLE_API_KEY") or "" @@ -32,18 +32,22 @@ # Runner.run(agent, ..., run_config=RunConfig(model_provider=PROVIDER)) +@function_tool +def get_weather(city: str): + print(f"[debug] getting weather for {city}") + return f"The weather in {city} is sunny." + + async def main(): # This agent will use the custom LLM provider agent = Agent( name="Assistant", instructions="You only respond in haikus.", model=OpenAIChatCompletionsModel(model=MODEL_NAME, openai_client=client), + tools=[get_weather], ) - result = await Runner.run( - agent, - "Tell me about recursion in programming.", - ) + result = await Runner.run(agent, "What's the weather in Tokyo?") print(result.final_output) diff --git a/examples/model_providers/custom_example_global.py b/examples/model_providers/custom_example_global.py index d7c293b1..ae9756d3 100644 --- a/examples/model_providers/custom_example_global.py +++ b/examples/model_providers/custom_example_global.py @@ -6,6 +6,7 @@ from agents import ( Agent, Runner, + function_tool, set_default_openai_api, set_default_openai_client, set_tracing_disabled, @@ -40,14 +41,21 @@ set_tracing_disabled(disabled=True) +@function_tool +def get_weather(city: str): + print(f"[debug] getting weather for {city}") + return f"The weather in {city} is sunny." 
+ + async def main(): agent = Agent( name="Assistant", instructions="You only respond in haikus.", model=MODEL_NAME, + tools=[get_weather], ) - result = await Runner.run(agent, "Tell me about recursion in programming.") + result = await Runner.run(agent, "What's the weather in Tokyo?") print(result.final_output) diff --git a/examples/model_providers/custom_example_provider.py b/examples/model_providers/custom_example_provider.py index 6e8af42e..4e590198 100644 --- a/examples/model_providers/custom_example_provider.py +++ b/examples/model_providers/custom_example_provider.py @@ -12,6 +12,7 @@ OpenAIChatCompletionsModel, RunConfig, Runner, + function_tool, set_tracing_disabled, ) @@ -47,16 +48,19 @@ def get_model(self, model_name: str | None) -> Model: CUSTOM_MODEL_PROVIDER = CustomModelProvider() +@function_tool +def get_weather(city: str): + print(f"[debug] getting weather for {city}") + return f"The weather in {city} is sunny." + + async def main(): - agent = Agent( - name="Assistant", - instructions="You only respond in haikus.", - ) + agent = Agent(name="Assistant", instructions="You only respond in haikus.", tools=[get_weather]) # This will use the custom model provider result = await Runner.run( agent, - "Tell me about recursion in programming.", + "What's the weather in Tokyo?", run_config=RunConfig(model_provider=CUSTOM_MODEL_PROVIDER), ) print(result.final_output) @@ -64,7 +68,7 @@ async def main(): # If you uncomment this, it will use OpenAI directly, not the custom provider # result = await Runner.run( # agent, - # "Tell me about recursion in programming.", + # "What's the weather in Tokyo?", # ) # print(result.final_output) diff --git a/uv.lock b/uv.lock index 9179bd4f..c3af99bd 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,4 @@ version = 1 -revision = 1 requires-python = ">=3.9" [[package]] @@ -783,7 +782,7 @@ wheels = [ [[package]] name = "openai-agents" -version = "0.0.3" +version = "0.0.4" source = { editable = "." } dependencies = [ { name = "griffe" },