Skip to content

Commit 831c42b

Browse files
dlqqq and JasonWeill authored
Log exceptions in /generate to a file (#431)
* rename process_message() => on_message() * remove unused import * add handle_exc() method in BaseChatHandler * add _default_handle_exc() to handle excs from handle_exc() * log exceptions from /generate to a file * pre-commit * improve call to action in GenerateCH.handle_exc() * prefer period over colon in timestamped filenames Co-authored-by: Jason Weill <[email protected]> --------- Co-authored-by: Jason Weill <[email protected]>
1 parent e586620 commit 831c42b

File tree

8 files changed

+67
-26
lines changed

8 files changed

+67
-26
lines changed

packages/jupyter-ai/jupyter_ai/chat_handlers/ask.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ def create_llm_chain(
4848
verbose=False,
4949
)
5050

51-
async def _process_message(self, message: HumanChatMessage):
51+
async def process_message(self, message: HumanChatMessage):
5252
args = self.parse_args(message)
5353
if args is None:
5454
return

packages/jupyter-ai/jupyter_ai/chat_handlers/base.py

Lines changed: 43 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
from jupyter_ai.config_manager import ConfigManager, Logger
1010
from jupyter_ai.models import AgentChatMessage, HumanChatMessage
1111
from jupyter_ai_magics.providers import BaseProvider
12-
from langchain.chat_models.base import BaseChatModel
1312

1413
if TYPE_CHECKING:
1514
from jupyter_ai.handlers import RootChatHandler
@@ -33,20 +32,54 @@ def __init__(
3332
self.llm_params = None
3433
self.llm_chain = None
3534

36-
async def process_message(self, message: HumanChatMessage):
37-
"""Processes the message passed by the root chat handler."""
35+
async def on_message(self, message: HumanChatMessage):
36+
"""
37+
Method which receives a human message and processes it via
38+
`self.process_message()`, calling `self.handle_exc()` when an exception
39+
is raised. This method is called by RootChatHandler when it routes a
40+
human message to this chat handler.
41+
"""
3842
try:
39-
await self._process_message(message)
43+
await self.process_message(message)
4044
except Exception as e:
41-
formatted_e = traceback.format_exc()
42-
response = f"Sorry, something went wrong and I wasn't able to index that path.\n\n```\n{formatted_e}\n```"
43-
self.reply(response, message)
45+
try:
46+
# we try/except `handle_exc()` in case it was overriden and
47+
# raises an exception by accident.
48+
await self.handle_exc(e, message)
49+
except Exception as e:
50+
await self._default_handle_exc(e, message)
4451

45-
async def _process_message(self, message: HumanChatMessage):
46-
"""Processes the message passed by the `Router`"""
52+
async def process_message(self, message: HumanChatMessage):
53+
"""
54+
Processes a human message routed to this chat handler. Chat handlers
55+
(subclasses) must implement this method. Don't forget to call
56+
`self.reply(<response>, message)` at the end!
57+
58+
The method definition does not need to be wrapped in a try/except block;
59+
any exceptions raised here are caught by `self.handle_exc()`.
60+
"""
4761
raise NotImplementedError("Should be implemented by subclasses.")
4862

49-
def reply(self, response, human_msg: Optional[HumanChatMessage] = None):
63+
async def handle_exc(self, e: Exception, message: HumanChatMessage):
64+
"""
65+
Handles an exception raised by `self.process_message()`. A default
66+
implementation is provided, however chat handlers (subclasses) should
67+
implement this method to provide a more helpful error response.
68+
"""
69+
self._default_handle_exc(e, message)
70+
71+
async def _default_handle_exc(self, e: Exception, message: HumanChatMessage):
72+
"""
73+
The default definition of `handle_exc()`. This is the default used when
74+
the `handle_exc()` excepts.
75+
"""
76+
formatted_e = traceback.format_exc()
77+
response = (
78+
f"Sorry, an error occurred. Details below:\n\n```\n{formatted_e}\n```"
79+
)
80+
self.reply(response, message)
81+
82+
def reply(self, response: str, human_msg: Optional[HumanChatMessage] = None):
5083
agent_msg = AgentChatMessage(
5184
id=uuid4().hex,
5285
time=time.time(),

packages/jupyter-ai/jupyter_ai/chat_handlers/clear.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ def __init__(self, chat_history: List[ChatMessage], *args, **kwargs):
1010
super().__init__(*args, **kwargs)
1111
self._chat_history = chat_history
1212

13-
async def _process_message(self, _):
13+
async def process_message(self, _):
1414
self._chat_history.clear()
1515
for handler in self._root_chat_handlers.values():
1616
if not handler:

packages/jupyter-ai/jupyter_ai/chat_handlers/default.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ def clear_memory(self):
8282
if self.chat_history:
8383
self.chat_history.clear()
8484

85-
async def _process_message(self, message: HumanChatMessage):
85+
async def process_message(self, message: HumanChatMessage):
8686
self.get_llm_chain()
8787
response = await self.llm_chain.apredict(input=message.body, stop=["\nHuman:"])
8888
self.reply(response, message)

packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py

Lines changed: 17 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
import asyncio
22
import os
3+
import time
4+
import traceback
5+
from pathlib import Path
36
from typing import Dict, List, Optional, Type
47

58
import nbformat
@@ -248,18 +251,23 @@ async def _generate_notebook(self, prompt: str):
248251
nbformat.write(notebook, final_path)
249252
return final_path
250253

251-
async def _process_message(self, message: HumanChatMessage):
254+
async def process_message(self, message: HumanChatMessage):
252255
self.get_llm_chain()
253256

254257
# first send a verification message to user
255258
response = "👍 Great, I will get started on your notebook. It may take a few minutes, but I will reply here when the notebook is ready. In the meantime, you can continue to ask me other questions."
256259
self.reply(response, message)
257260

258-
try:
259-
final_path = await self._generate_notebook(prompt=message.body)
260-
response = f"""🎉 I have created your notebook and saved it to the location {final_path}. I am still learning how to create notebooks, so please review all code before running it."""
261-
except Exception as e:
262-
self.log.exception(e)
263-
response = "An error occurred while generating the notebook. Try running the /generate task again."
264-
finally:
265-
self.reply(response, message)
261+
final_path = await self._generate_notebook(prompt=message.body)
262+
response = f"""🎉 I have created your notebook and saved it to the location {final_path}. I am still learning how to create notebooks, so please review all code before running it."""
263+
self.reply(response, message)
264+
265+
async def handle_exc(self, e: Exception, message: HumanChatMessage):
266+
timestamp = time.strftime("%Y-%m-%d-%H.%M.%S")
267+
log_path = Path(f"jupyter-ai-logs/generate-{timestamp}.log")
268+
log_path.parent.mkdir(parents=True, exist_ok=True)
269+
with log_path.open("w") as log:
270+
traceback.print_exc(file=log)
271+
272+
response = f"An error occurred while generating the notebook. The error details have been saved to `./{log_path}`.\n\nTry running `/generate` again, as some language models require multiple attempts before a notebook is generated."
273+
self.reply(response, message)

packages/jupyter-ai/jupyter_ai/chat_handlers/help.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,5 +32,5 @@ class HelpChatHandler(BaseChatHandler):
3232
def __init__(self, *args, **kwargs):
3333
super().__init__(*args, **kwargs)
3434

35-
async def _process_message(self, message: HumanChatMessage):
35+
async def process_message(self, message: HumanChatMessage):
3636
self.reply(HELP_MESSAGE, message)

packages/jupyter-ai/jupyter_ai/chat_handlers/learn.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ def _load(self):
7777
except Exception as e:
7878
self.log.error("Could not load vector index from disk.")
7979

80-
async def _process_message(self, message: HumanChatMessage):
80+
async def process_message(self, message: HumanChatMessage):
8181
# If no embedding provider has been selected
8282
em_provider_cls, em_provider_args = self.get_embedding_provider()
8383
if not em_provider_cls:

packages/jupyter-ai/jupyter_ai/handlers.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -222,9 +222,9 @@ async def _route(self, message):
222222

223223
start = time.time()
224224
if is_command:
225-
await self.chat_handlers[command].process_message(message)
225+
await self.chat_handlers[command].on_message(message)
226226
else:
227-
await default.process_message(message)
227+
await default.on_message(message)
228228

229229
latency_ms = round((time.time() - start) * 1000)
230230
command_readable = "Default" if command == "default" else command

0 commit comments

Comments (0)