
Commit 9a0f4d5

Update caching to use JSON instead of pkl

1 parent a15467d commit 9a0f4d5

5 files changed: +107 -56 lines changed

backend/api/asset_liability.py

Lines changed: 7 additions & 4 deletions
@@ -9,8 +9,11 @@
 router = APIRouter()
 
 
-async def get_asset_liability_matrix(
-    snapshot_path: str, vat: Vat, mode: int, perp_market_index: int
+async def _get_asset_liability_matrix(
+    snapshot_path: str,
+    vat: Vat,
+    mode: int,
+    perp_market_index: int,
 ) -> dict:
     print("==> Getting asset liability matrix...")
     res, df = await get_matrix(vat, mode, perp_market_index)
@@ -28,8 +31,8 @@ async def get_asset_liability_matrix(
 async def get_asset_liability_matrix(
     request: BackendRequest, mode: int, perp_market_index: int
 ):
-    return await get_asset_liability_matrix(
-        request.state.current_pickle_path,
+    return await _get_asset_liability_matrix(
+        request.state.backend_state.current_pickle_path,
         request.state.backend_state.vat,
         mode,
         perp_market_index,
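The underscore rename is more than style: before this hunk the module bound the name get_asset_liability_matrix twice, so the handler's `return await get_asset_liability_matrix(...)` resolved to the handler itself rather than the helper. The same hunk also points the pickle path at request.state.backend_state, matching where the vat already lived. A toy sketch of the Python shadowing pitfall (hypothetical names, not repo code):

# Toy reproduction of the shadowing bug the rename fixes: the second `def`
# rebinds the module-level name, so the inner call targets the handler
# itself, not the original helper.
async def get_value(snapshot: str, x: int) -> int:  # "helper" definition
    return x * 2


async def get_value(x: int) -> int:  # "handler": rebinds the same name
    # Intended to call the helper; actually calls this function again,
    # so awaiting get_value(3) raises TypeError on the extra argument.
    return await get_value("snap", x)

Renaming the helper (for example to _get_value, as the diff does) breaks the cycle.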

backend/app.py

Lines changed: 8 additions & 2 deletions
@@ -52,15 +52,21 @@ def clean_cache(state: BackendState) -> None:
     pickles.sort(key=os.path.getmtime)
     for pickle in pickles[:-5]:
         print(f"deleting {pickle}")
-        shutil.rmtree(pickle)
+        try:
+            shutil.rmtree(pickle)
+        except Exception as e:
+            print(f"Error deleting {pickle}: {e}")
 
     cache_files = glob.glob("cache/*")
     if len(cache_files) > 35:
         print("cache folder has more than 35 files, deleting old ones")
         cache_files.sort(key=os.path.getmtime)
         for cache_file in cache_files[:-35]:
             print(f"deleting {cache_file}")
-            os.remove(cache_file)
+            try:
+                os.remove(cache_file)
+            except Exception as e:
+                print(f"Error deleting {cache_file}: {e}")
 
 
 @repeat_every(seconds=60 * 8, wait_first=True)
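Both cleanup loops now share a best-effort shape: log and skip a failed deletion rather than let one bad path abort the whole periodic task (plausible when several workers race to prune the same directory). A generic sketch of the pattern, with hypothetical names:

import glob
import os
import shutil


def prune_oldest(pattern: str, keep: int) -> None:
    """Best-effort cleanup: delete all but the newest `keep` entries
    matching `pattern`, logging failures instead of raising."""
    entries = sorted(glob.glob(pattern), key=os.path.getmtime)
    for path in entries[:-keep]:
        try:
            # rmtree for snapshot directories, remove for plain cache files
            (shutil.rmtree if os.path.isdir(path) else os.remove)(path)
        except OSError as e:  # e.g. already deleted by another worker
            print(f"Error deleting {path}: {e}")

In the diff itself, clean_cache keeps the newest five snapshot pickles and the newest 35 entries under cache/.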

backend/middleware/cache_middleware.py

Lines changed: 74 additions & 48 deletions
@@ -1,9 +1,9 @@
 import asyncio
 import glob
 import hashlib
+import json
 import os
-import pickle
-from typing import Any, Callable, Dict, Optional
+from typing import Callable, Dict, Optional
 
 from backend.state import BackendRequest
 from backend.state import BackendState
@@ -23,67 +23,67 @@ def __init__(self, app: ASGIApp, state: BackendState, cache_dir: str = "cache"):
             os.makedirs(self.cache_dir)
 
     async def dispatch(self, request: BackendRequest, call_next: Callable):
-        if request.url.path.startswith("/api/snapshot"):
-            return await call_next(request)
-        if request.url.path.startswith("/api/price_shock"):
-            return await call_next(request)
         if not request.url.path.startswith("/api"):
             return await call_next(request)
-        if self.state.current_pickle_path == "bootstrap":
-            return await call_next(request)
 
         current_pickle = self.state.current_pickle_path
         previous_pickle = self._get_previous_pickle()
 
-        # Try to serve data from the current (latest) pickle first
         current_cache_key = self._generate_cache_key(request, current_pickle)
-        current_cache_file = os.path.join(self.cache_dir, f"{current_cache_key}.pkl")
+        current_cache_file = os.path.join(self.cache_dir, f"{current_cache_key}.json")
 
         if os.path.exists(current_cache_file):
-            print(f"Serving latest data for {request.url.path}")
-            with open(current_cache_file, "rb") as f:
-                response_data = pickle.load(f)
-
-            return Response(
-                content=response_data["content"],
-                status_code=response_data["status_code"],
-                headers=dict(response_data["headers"], **{"X-Cache-Status": "Fresh"}),
-            )
+            return self._serve_cached_response(current_cache_file, "Fresh")
 
-        # If no data in current pickle, try the previous pickle
         if previous_pickle:
             previous_cache_key = self._generate_cache_key(request, previous_pickle)
             previous_cache_file = os.path.join(
-                self.cache_dir, f"{previous_cache_key}.pkl"
+                self.cache_dir, f"{previous_cache_key}.json"
            )
 
            if os.path.exists(previous_cache_file):
-                print(f"Serving stale data for {request.url.path}")
-                with open(previous_cache_file, "rb") as f:
-                    response_data = pickle.load(f)
-
-                # Prepare background task
-                background_tasks = BackgroundTasks()
-                background_tasks.add_task(
-                    self._fetch_and_cache,
+                return await self._serve_stale_response(
+                    previous_cache_file,
                     request,
                     call_next,
                     current_cache_key,
                     current_cache_file,
                 )
 
-            response = Response(
-                content=response_data["content"],
-                status_code=response_data["status_code"],
-                headers=dict(
-                    response_data["headers"], **{"X-Cache-Status": "Stale"}
-                ),
-            )
-            response.background = background_tasks
-            return response
+        return await self._serve_miss_response(
+            request, call_next, current_cache_key, current_cache_file
+        )
 
-        # If no data available, return an empty response and fetch fresh data in the background
-        print(f"No data available for {request.url.path}")
+    def _serve_cached_response(self, cache_file: str, cache_status: str):
+        print(f"Serving {cache_status.lower()} data")
+        with open(cache_file, "r") as f:
+            response_data = json.load(f)
+
+        content = json.dumps(response_data["content"]).encode("utf-8")
+        headers = {
+            k: v
+            for k, v in response_data["headers"].items()
+            if k.lower() != "content-length"
+        }
+        headers["Content-Length"] = str(len(content))
+        headers["X-Cache-Status"] = cache_status
+
+        return Response(
+            content=content,
+            status_code=response_data["status_code"],
+            headers=headers,
+            media_type="application/json",
+        )
+
+    async def _serve_stale_response(
+        self,
+        cache_file: str,
+        request: BackendRequest,
+        call_next: Callable,
+        current_cache_key: str,
+        current_cache_file: str,
+    ):
+        response = self._serve_cached_response(cache_file, "Stale")
         background_tasks = BackgroundTasks()
         background_tasks.add_task(
             self._fetch_and_cache,
@@ -92,12 +92,32 @@ async def dispatch(self, request: BackendRequest, call_next: Callable):
             current_cache_key,
             current_cache_file,
         )
+        response.background = background_tasks
+        return response
+
+    async def _serve_miss_response(
+        self,
+        request: BackendRequest,
+        call_next: Callable,
+        cache_key: str,
+        cache_file: str,
+    ):
+        print(f"No data available for {request.url.path}")
+        background_tasks = BackgroundTasks()
+        background_tasks.add_task(
+            self._fetch_and_cache,
+            request,
+            call_next,
+            cache_key,
+            cache_file,
+        )
+        content = json.dumps({"result": "miss"}).encode("utf-8")
 
-        # Return an empty response immediately
         response = Response(
-            content='{"result": "miss"}',
-            status_code=200,  # No Content
-            headers={"X-Cache-Status": "Miss"},
+            content=content,
+            status_code=200,
+            headers={"X-Cache-Status": "Miss", "Content-Length": str(len(content))},
+            media_type="application/json",
         )
         response.background = background_tasks
         return response
@@ -120,15 +140,21 @@ async def _fetch_and_cache(
             response_body = b""
             async for chunk in response.body_iterator:
                 response_body += chunk
+
+            body_content = json.loads(response_body.decode())
             response_data = {
-                "content": response_body,
+                "content": body_content,
                 "status_code": response.status_code,
-                "headers": dict(response.headers),
+                "headers": {
+                    k: v
+                    for k, v in response.headers.items()
+                    if k.lower() != "content-length"
+                },
             }
 
             os.makedirs(os.path.dirname(cache_file), exist_ok=True)
-            with open(cache_file, "wb") as f:
-                pickle.dump(response_data, f)
+            with open(cache_file, "w") as f:
+                json.dump(response_data, f)
             print(f"Cached fresh data for {request.url.path}")
         else:
             print(
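For reference, the entry _fetch_and_cache now writes is plain JSON: the parsed body under "content", plus "status_code" and "headers" with Content-Length removed. Because _serve_cached_response re-serializes the body on the way out, the byte length can differ from the original response, hence the recomputed Content-Length. A round-trip sketch with a made-up payload and path:

import json
import os

# Hypothetical cache entry mirroring the structure written by
# _fetch_and_cache; the payload and file path are made up.
entry = {
    "content": {"res": [1, 2], "df": []},  # parsed JSON body, not raw bytes
    "status_code": 200,
    "headers": {"content-type": "application/json"},  # Content-Length dropped
}

os.makedirs("cache", exist_ok=True)
with open("cache/example.json", "w") as f:
    json.dump(entry, f)

# Read side, as in _serve_cached_response: re-serialize the body and
# recompute Content-Length, since json.dumps may change the byte count.
with open("cache/example.json") as f:
    loaded = json.load(f)

body = json.dumps(loaded["content"]).encode("utf-8")
headers = {k: v for k, v in loaded["headers"].items() if k.lower() != "content-length"}
headers["Content-Length"] = str(len(body))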

gunicorn_config.py

Lines changed: 4 additions & 0 deletions
@@ -8,3 +8,7 @@
 accesslog = "-"
 errorlog = "-"
 loglevel = "info"
+
+# Restart workers that die unexpectedly
+worker_exit_on_restart = True
+worker_restart_delay = 2

src/page/asset_liability.py

Lines changed: 14 additions & 2 deletions
@@ -1,7 +1,10 @@
+import json
+
 from driftpy.constants.perp_markets import mainnet_perp_market_configs
 from driftpy.constants.spot_markets import mainnet_spot_market_configs
 from lib.api import api
 import pandas as pd
+from requests.exceptions import JSONDecodeError
 import streamlit as st
 
 
@@ -50,8 +53,17 @@ def asset_liab_matrix_page():
         st.stop()
 
     except Exception as e:
-        st.write(e)
-        st.stop()
+        if type(e) == JSONDecodeError:
+            print("HIT A JSONDecodeError...", e)
+            st.write("Fetching data for the first time...")
+            st.image(
+                "https://i.gifer.com/origin/8a/8a47f769c400b0b7d81a8f6f8e09a44a_w200.gif"
+            )
+            st.write("Check again in one minute!")
+            st.stop()
+        else:
+            st.write(e)
+            st.stop()
 
     res = pd.DataFrame(result["res"])
     df = pd.DataFrame(result["df"])
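The new branch leans on the fact that requests raises JSONDecodeError when a response body is not valid JSON, for example an empty body while the backend is still bootstrapping its first snapshot; the page now reads that as "first fetch in progress" instead of dumping the raw exception. A hypothetical minimal version of the client-side check (the URL and helper name are illustrative, not from lib.api):

import requests
from requests.exceptions import JSONDecodeError


def fetch_matrix(base_url: str, mode: int, perp_market_index: int):
    """Return the parsed matrix payload, or None while the backend warms up."""
    resp = requests.get(
        f"{base_url}/api/asset-liability/matrix/{mode}/{perp_market_index}"  # assumed route
    )
    try:
        return resp.json()
    except JSONDecodeError:
        # Body wasn't JSON yet; caller should show a "check back soon" note.
        return None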
