Skip to content

Commit 97e455b

Browse files
authored
Merge pull request #205 from restackio/update-error-handling
Update error handling
2 parents 25135c6 + 98dd0ca commit 97e455b

File tree

61 files changed

+701
-539
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

61 files changed

+701
-539
lines changed

agent_apis/pyproject.toml

+1-1
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,7 @@ dependencies = [
1111
"python-dotenv==1.0.1",
1212
"openai>=1.61.0",
1313
"aiohttp>=3.11.12",
14-
"restack-ai>=0.0.77",
14+
"restack-ai>=0.0.78",
1515
]
1616

1717
[project.scripts]

agent_apis/src/functions/llm.py

+2-2
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,7 @@
33

44
from dotenv import load_dotenv
55
from openai import OpenAI
6-
from restack_ai.function import FunctionFailure, function, log
6+
from restack_ai.function import NonRetryableError, function, log
77

88
load_dotenv()
99

@@ -47,4 +47,4 @@ async def llm(function_input: FunctionInputParams) -> str:
4747
return response.choices[0].message.content
4848
except Exception as e:
4949
error_message = "llm function failed"
50-
raise FunctionFailure(error_message, non_retryable=True) from e
50+
raise NonRetryableError(error_message) from e

agent_apis/src/functions/weather.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
import aiohttp
2-
from restack_ai.function import function, log
2+
from restack_ai.function import NonRetryableError, function, log
33

44
HTTP_OK = 200
55

@@ -21,6 +21,6 @@ async def weather() -> str:
2121
return str(data)
2222
error_message = f"Error: {response.status}"
2323
raise_exception(error_message)
24-
except Exception:
25-
log.error("Error: {e}")
26-
raise
24+
except Exception as e:
25+
error_message = f"Error: {e}"
26+
raise NonRetryableError(error_message) from e

agent_apis/src/workflows/multistep.py

+26-18
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
from datetime import timedelta
22

33
from pydantic import BaseModel, Field
4-
from restack_ai.workflow import import_functions, log, workflow
4+
from restack_ai.workflow import NonRetryableError, import_functions, log, workflow
55

66
with import_functions():
77
from src.functions.llm import FunctionInputParams, llm
@@ -20,20 +20,28 @@ async def run(self, workflow_input: WorkflowInputParams) -> dict:
2020
user_content = f"Greet this person {workflow_input.name}"
2121

2222
# Step 1 get weather data
23-
weather_data = await workflow.step(
24-
function=weather, start_to_close_timeout=timedelta(seconds=120)
25-
)
26-
27-
# Step 2 Generate greeting with LLM based on name and weather data
28-
29-
llm_message = await workflow.step(
30-
function=llm,
31-
function_input=FunctionInputParams(
32-
system_content=f"You are a personal assitant and have access to weather data {weather_data}. Always greet person with relevant info from weather data",
33-
user_content=user_content,
34-
model="gpt-4o-mini",
35-
),
36-
start_to_close_timeout=timedelta(seconds=120),
37-
)
38-
log.info("MultistepWorkflow completed", llm_message=llm_message)
39-
return {"message": llm_message, "weather": weather_data}
23+
try:
24+
weather_data = await workflow.step(
25+
weather, start_to_close_timeout=timedelta(seconds=120)
26+
)
27+
except Exception as e:
28+
error_message = f"Error during weather: {e}"
29+
raise NonRetryableError(error_message) from e
30+
else:
31+
# Step 2 Generate greeting with LLM based on name and weather data
32+
try:
33+
llm_message = await workflow.step(
34+
function=llm,
35+
function_input=FunctionInputParams(
36+
system_content=f"You are a personal assitant and have access to weather data {weather_data}. Always greet person with relevant info from weather data",
37+
user_content=user_content,
38+
model="gpt-4o-mini",
39+
),
40+
start_to_close_timeout=timedelta(seconds=120),
41+
)
42+
except Exception as e:
43+
error_message = f"Error during llm: {e}"
44+
raise NonRetryableError(error_message) from e
45+
else:
46+
log.info("MultistepWorkflow completed", llm_message=llm_message)
47+
return {"message": llm_message, "weather": weather_data}

agent_chat/pyproject.toml

+1-1
Original file line number | Diff line number | Diff line change
@@ -10,7 +10,7 @@ dependencies = [
1010
"watchfiles>=1.0.4",
1111
"python-dotenv==1.0.1",
1212
"openai>=1.61.0",
13-
"restack-ai>=0.0.77",
13+
"restack-ai>=0.0.78",
1414
]
1515

1616
[project.scripts]

agent_chat/src/agents/agent.py

+9-9
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
from datetime import timedelta
22

33
from pydantic import BaseModel
4-
from restack_ai.agent import agent, import_functions, log, AgentError
4+
from restack_ai.agent import NonRetryableError, agent, import_functions, log
55

66
with import_functions():
77
from src.functions.llm_chat import LlmChatInput, Message, llm_chat
@@ -23,22 +23,22 @@ def __init__(self) -> None:
2323

2424
@agent.event
2525
async def messages(self, messages_event: MessagesEvent) -> list[Message]:
26+
log.info(f"Received messages: {messages_event.messages}")
27+
self.messages.extend(messages_event.messages)
28+
29+
log.info(f"Calling llm_chat with messages: {self.messages}")
2630
try:
27-
log.info(f"Received messages: {messages_event.messages}")
28-
self.messages.extend(messages_event.messages)
29-
30-
log.info(f"Calling llm_chat with messages: {self.messages}")
3131
assistant_message = await agent.step(
3232
function=llm_chat,
3333
function_input=LlmChatInput(messages=self.messages),
3434
start_to_close_timeout=timedelta(seconds=120),
3535
)
36-
36+
except Exception as e:
37+
error_message = f"Error during llm_chat: {e}"
38+
raise NonRetryableError(error_message) from e
39+
else:
3740
self.messages.append(assistant_message)
3841
return self.messages
39-
except Exception as e:
40-
log.error(f"Error in messages: {e}")
41-
raise AgentError(f"Error in messages: {e}")
4242

4343
@agent.event
4444
async def end(self, end: EndEvent) -> EndEvent:

agent_chat/src/functions/llm_chat.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,7 @@
44
from dotenv import load_dotenv
55
from openai import OpenAI
66
from pydantic import BaseModel
7-
from restack_ai.function import FunctionFailure, function, log
7+
from restack_ai.function import NonRetryableError, function, log
88

99
load_dotenv()
1010

@@ -22,7 +22,7 @@ class LlmChatInput(BaseModel):
2222

2323
def raise_exception(message: str) -> None:
2424
log.error(message)
25-
raise FunctionFailure(message, non_retryable=True)
25+
raise NonRetryableError(message)
2626

2727

2828
@function.defn()
@@ -48,8 +48,8 @@ async def llm_chat(agent_input: LlmChatInput) -> dict[str, str]:
4848
messages=agent_input.messages,
4949
)
5050
except Exception as e:
51-
log.error("llm_chat function failed", error=e)
52-
raise
51+
error_message = f"LLM chat failed: {e}"
52+
raise NonRetryableError(error_message) from e
5353
else:
5454
log.info(
5555
"llm_chat function completed", assistant_raw_response=assistant_raw_response

agent_rag/pyproject.toml

+1-1
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,7 @@ dependencies = [
1111
"watchfiles>=1.0.4",
1212
"requests==2.32.3",
1313
"python-dotenv==1.0.1",
14-
"restack-ai>=0.0.77",
14+
"restack-ai>=0.0.78",
1515
]
1616

1717
[project.scripts]

agent_rag/src/agents/chat_rag.py

+30-24
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
from datetime import timedelta
22

33
from pydantic import BaseModel
4-
from restack_ai.agent import agent, import_functions, log
4+
from restack_ai.agent import NonRetryableError, agent, import_functions, log
55

66
with import_functions():
77
from src.functions.llm_chat import LlmChatInput, Message, llm_chat
@@ -26,30 +26,36 @@ def __init__(self) -> None:
2626
async def messages(self, messages_event: MessagesEvent) -> list[Message]:
2727
log.info(f"Received messages: {messages_event.messages}")
2828
self.messages.extend(messages_event.messages)
29-
30-
sales_info = await agent.step(
31-
function=lookup_sales, start_to_close_timeout=timedelta(seconds=120)
32-
)
33-
34-
system_content = f"You are a helpful assistant that can help with sales data. Here is the sales information: {sales_info}"
35-
36-
completion = await agent.step(
37-
function=llm_chat,
38-
function_input=LlmChatInput(
39-
messages=self.messages, system_content=system_content
40-
),
41-
start_to_close_timeout=timedelta(seconds=120),
42-
)
43-
44-
log.info(f"completion: {completion}")
45-
46-
self.messages.append(
47-
Message(
48-
role="assistant", content=completion.choices[0].message.content or ""
29+
try:
30+
sales_info = await agent.step(
31+
function=lookup_sales, start_to_close_timeout=timedelta(seconds=120)
4932
)
50-
)
51-
52-
return self.messages
33+
except Exception as e:
34+
error_message = f"Error during lookup_sales: {e}"
35+
raise NonRetryableError(error_message) from e
36+
else:
37+
system_content = f"You are a helpful assistant that can help with sales data. Here is the sales information: {sales_info}"
38+
39+
try:
40+
completion = await agent.step(
41+
function=llm_chat,
42+
function_input=LlmChatInput(
43+
messages=self.messages, system_content=system_content
44+
),
45+
start_to_close_timeout=timedelta(seconds=120),
46+
)
47+
except Exception as e:
48+
error_message = f"Error during llm_chat: {e}"
49+
raise NonRetryableError(error_message) from e
50+
else:
51+
log.info(f"completion: {completion}")
52+
self.messages.append(
53+
Message(
54+
role="assistant", content=completion.choices[0].message.content or ""
55+
)
56+
)
57+
58+
return self.messages
5359

5460
@agent.event
5561
async def end(self) -> EndEvent:

agent_rag/src/functions/llm_chat.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,7 @@
55
from openai import OpenAI
66
from openai.types.chat.chat_completion import ChatCompletion
77
from pydantic import BaseModel
8-
from restack_ai.function import FunctionFailure, function, log
8+
from restack_ai.function import NonRetryableError, function, log
99

1010
load_dotenv()
1111

@@ -23,7 +23,7 @@ class LlmChatInput(BaseModel):
2323

2424
def raise_exception(message: str) -> None:
2525
log.error(message)
26-
raise FunctionFailure(message, non_retryable=True)
26+
raise NonRetryableError(message)
2727

2828

2929
@function.defn()
@@ -49,8 +49,8 @@ async def llm_chat(function_input: LlmChatInput) -> ChatCompletion:
4949
messages=function_input.messages,
5050
)
5151
except Exception as e:
52-
log.error("llm_chat function failed", error=e)
53-
raise
52+
error_message = f"LLM chat failed: {e}"
53+
raise NonRetryableError(error_message) from e
5454
else:
5555
log.info("llm_chat function completed", response=response)
5656
return response

agent_rag/src/functions/lookup_sales.py

+3-3
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
from pydantic import BaseModel
2-
from restack_ai.function import function, log
2+
from restack_ai.function import NonRetryableError, function, log
33

44

55
class SalesItem(BaseModel):
@@ -85,5 +85,5 @@ async def lookup_sales() -> str:
8585

8686
return str(items)
8787
except Exception as e:
88-
log.error("lookup_sales function failed", error=e)
89-
raise
88+
error_message = f"lookup_sales function failed: {e}"
89+
raise NonRetryableError(error_message) from e

agent_stream/pyproject.toml

+2-1
Original file line number | Diff line number | Diff line change
@@ -10,8 +10,8 @@ dependencies = [
1010
"watchfiles>=1.0.4",
1111
"python-dotenv==1.0.1",
1212
"openai>=1.61.0",
13-
"restack-ai>=0.0.77",
1413
"livekit-api>=0.8.2",
14+
"restack-ai>=0.0.78",
1515
]
1616

1717
[project.scripts]
@@ -26,6 +26,7 @@ include = ["src"]
2626
[tool.hatch.build.targets.wheel]
2727
include = ["src"]
2828

29+
2930
[build-system]
3031
requires = ["hatchling"]
3132
build-backend = "hatchling.build"

agent_stream/src/agents/agent.py

+13-8
Original file line number | Diff line number | Diff line change
@@ -2,7 +2,7 @@
22
from datetime import timedelta
33

44
from pydantic import BaseModel
5-
from restack_ai.agent import agent, import_functions, log
5+
from restack_ai.agent import NonRetryableError, agent, import_functions, log
66

77
with import_functions():
88
from src.functions.llm_chat import LlmChatInput, Message, llm_chat
@@ -32,13 +32,18 @@ async def messages(self, messages_event: MessagesEvent) -> list[Message]:
3232
log.info(f"Received message: {messages_event.messages}")
3333
self.messages.extend(messages_event.messages)
3434

35-
assistant_message = await agent.step(
36-
function=llm_chat,
37-
function_input=LlmChatInput(messages=self.messages),
38-
start_to_close_timeout=timedelta(seconds=120),
39-
)
40-
self.messages.append(Message(role="assistant", content=str(assistant_message)))
41-
return self.messages
35+
try:
36+
assistant_message = await agent.step(
37+
function=llm_chat,
38+
function_input=LlmChatInput(messages=self.messages),
39+
start_to_close_timeout=timedelta(seconds=120),
40+
)
41+
except Exception as e:
42+
error_message = f"Error during llm_chat: {e}"
43+
raise NonRetryableError(error_message) from e
44+
else:
45+
self.messages.append(Message(role="assistant", content=str(assistant_message)))
46+
return self.messages
4247

4348
@agent.event
4449
async def end(self, end: EndEvent) -> EndEvent:

agent_stream/src/functions/llm_chat.py

+3-3
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,7 @@
33

44
from openai import OpenAI
55
from pydantic import BaseModel, Field
6-
from restack_ai.function import function, log, stream_to_websocket
6+
from restack_ai.function import NonRetryableError, function, stream_to_websocket
77

88
from src.client import api_address
99

@@ -49,5 +49,5 @@ async def llm_chat(function_input: LlmChatInput) -> str:
4949
return await stream_to_websocket(api_address=api_address, data=response)
5050

5151
except Exception as e:
52-
log.error("llm_chat function failed", error=str(e))
53-
raise
52+
error_message = f"llm_chat function failed: {e}"
53+
raise NonRetryableError(error_message) from e

agent_telephony/twilio/agent_twilio/pyproject.toml

+1-1
Original file line number | Diff line number | Diff line change
@@ -10,8 +10,8 @@ dependencies = [
1010
"watchfiles>=1.0.4",
1111
"python-dotenv==1.0.1",
1212
"openai>=1.61.0",
13-
"restack-ai>=0.0.77",
1413
"livekit-api>=0.8.2",
14+
"restack-ai>=0.0.78",
1515
]
1616

1717
[project.scripts]

0 commit comments

Comments (0)