
Commit be4ac14 (parent 9d96252)

Handle starknet event keys/data correctly

File tree

4 files changed (+58 -42 lines)


pyproject.toml

+31 -32

@@ -48,38 +48,37 @@ classifiers = [
     "Typing :: Typed",
 ]
 
-dependencies = [
-    "aiohttp~=3.10",
-    "aiolimiter~=1.1",
-    "anyio~=4.4",
-    "appdirs~=1.4",
-    "APScheduler~=3.10",
-    "async-lru~=2.0",
-    "asyncpg~=0.29",
-    "click~=8.1",
-    "datamodel-code-generator~=0.26",
-    "eth-abi~=5.0",
-    "lru-dict~=1.3",
-    "orjson~=3.10",
-    "prometheus-client~=0.20",
-    "pycryptodome~=3.20",
-    "pydantic~=2.9",
-    "pyhumps~=3.8",
-    "pysignalr~=1.0",
-    "python-dotenv~=1.0",
-    "python-json-logger~=2.0",
-    "ruamel.yaml~=0.18.6",
-    "sentry-sdk~=2.16",
-    "sqlparse~=0.5",
-    "starknet-py==0.24.0",
-    "strict-rfc3339~=0.7",
-    "survey~=5.4",
-    "tabulate~=0.9",
-    # NOTE: Heavily patched; don't update without testing.
-    "tortoise-orm==0.21.7",
-    "uvloop~=0.20",
-    "web3~=7.2",
-]
+[tool.poetry.dependencies]
+aiohttp = "~3.10"
+aiolimiter = "~1.1"
+anyio = "~4.4"
+appdirs = "~1.4"
+APScheduler = "~3.10"
+async-lru = "~2.0"
+asyncpg = "~0.29"
+click = "~8.1"
+datamodel-code-generator = "~0.26"
+eth-abi = "~5.0"
+lru-dict = "~1.3"
+orjson = "~3.10"
+prometheus-client = "~0.20"
+pycryptodome = "~3.20"
+pydantic = "~2.9"
+pyhumps = "~3.8"
+pysignalr = "~1.0"
+python-dotenv = "~1.0"
+python-json-logger = "~2.0"
+ruamel.yaml = "~0.18.6"
+sentry-sdk = "~2.16"
+sqlparse = "~0.5"
+starknet-py = { git = "https://github.com/m-kus/starknet.py", rev = "8d76ee0af683062bf63c6b32a5fd49608e6b731e" }
+strict-rfc3339 = "~0.7"
+survey = "~5.4"
+tabulate = "~0.9"
+# NOTE: Heavily patched; don't update without testing.
+tortoise-orm = "0.21.7"
+uvloop = "~0.20"
+web3 = "~7.2"
 
 [project.optional-dependencies]
 migrations = [

requirements.txt

+1 -1

@@ -83,7 +83,7 @@ sentry-sdk==2.16.0
 six==1.16.0
 sniffio==1.3.1
 sqlparse==0.5.1
-starknet-py==0.24.0
+git+https://github.com/m-kus/starknet.py@8d76ee0af683062bf63c6b32a5fd49608e6b731e#egg=starknet-py
 strict-rfc3339==0.7
 survey==5.4.0
 sympy==1.11.1

src/dipdup/abi/cairo.py

+17 -3

@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from collections import OrderedDict
 from functools import cache
 from typing import TYPE_CHECKING
 from typing import Any
@@ -16,7 +17,7 @@
 from starknet_py.abi.v2 import Abi  # type: ignore[import-untyped]
 from starknet_py.cairo.data_types import CairoType  # type: ignore[import-untyped]
 from starknet_py.cairo.data_types import EventType
-from starknet_py.serialization import PayloadSerializer  # type: ignore[import-untyped]
+from starknet_py.serialization import CairoDataSerializer  # type: ignore[import-untyped]
 
 from dipdup.package import DipDupPackage
 
@@ -25,7 +26,7 @@ class CairoEventAbi(TypedDict):
     name: str
     event_identifier: str
     members: dict[str, CairoType]
-    serializer: PayloadSerializer
+    sorted_serializers: OrderedDict[str, CairoDataSerializer]
 
 
 class CairoAbi(TypedDict):
@@ -93,12 +94,25 @@ def convert_abi(package: DipDupPackage) -> dict[str, CairoAbi]:
         for name, event_type in parsed_abi.events.items():
             if name in converted_abi['events']:
                 raise NotImplementedError('Multiple events with the same name are not supported')
+
+            serializers = serializer_for_event(event_type).serializers
+
+            # Event payload is returned from RPC in two arrays: keys (including event selector) and data.
+            # Since any event field can be marked as key, the original ordering might be broken.
+            #
+            # We need to reorder deserializers so that the keys remain in the beginning and
+            # the rest of the fields are moved towards the end (preserving their inner ordering).
+            #
+            # That way we can apply the deserializers to the concatenation of keys (without first element) + data.
+            sorted_members = event_type.keys + [name for name in serializers if name not in event_type.keys]
+            sorted_serializers = OrderedDict((name, serializers[name]) for name in sorted_members)
+
             converted_abi['events'].append(
                 CairoEventAbi(
                     name=name,
                     event_identifier=sn_keccak(name),
                     members=event_type.types,
-                    serializer=serializer_for_event(event_type),
+                    sorted_serializers=sorted_serializers,
                 )
             )
         abi_by_typename[contract_typename] = converted_abi
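
To make the reordering in convert_abi concrete, here is a minimal standalone sketch of the same expression, with plain strings standing in for the starknet.py serializer objects and a made-up event whose `to` and `amount` fields are declared as keys (all names below are invented for illustration):

from collections import OrderedDict

# Made-up event: `to` and `amount` are marked as keys; `from_` and `memo` arrive in the data array.
serializers = {
    'from_': '<from serializer>',
    'to': '<to serializer>',
    'amount': '<amount serializer>',
    'memo': '<memo serializer>',
}
event_keys = ['to', 'amount']  # stands in for event_type.keys

# Same reordering as in convert_abi: key fields first, remaining fields keep their original order.
sorted_members = event_keys + [name for name in serializers if name not in event_keys]
sorted_serializers = OrderedDict((name, serializers[name]) for name in sorted_members)

assert list(sorted_serializers) == ['to', 'amount', 'from_', 'memo']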

src/dipdup/indexes/starknet_events/matcher.py

+9 -6

@@ -1,5 +1,5 @@
 import logging
-from collections import deque
+from collections import deque, OrderedDict
 from collections.abc import Iterable
 from typing import Any
 
@@ -69,15 +69,18 @@ def prepare_event_handler_args(
         name=snake_to_pascal(handler_config.name) + 'Payload',
     )
 
-    serializer = package._cairo_abis.get_event_abi(
+    event_abi = package._cairo_abis.get_event_abi(
         typename=typename,
         name=handler_config.name,
-    )['serializer']
-    data = [int(s, 16) for s in matched_event.data]
+    )
+
+    # Skipping first key which is the event selector
+    # Note that some fields might be encoded with more than one felt (complex types)
+    raw_data = [int(x, 16) for x in matched_event.keys[1:] + matched_event.data]
 
     # holding context for error building
-    with DeserializationContext.create(data) as context:
-        data_dict = deserialize_to_dict(serializer.serializers, context)
+    with DeserializationContext.create(raw_data) as context:
+        data_dict = deserialize_to_dict(event_abi['sorted_serializers'], context)
 
     typed_payload = parse_object(type_=type_, data=data_dict)
     return StarknetEvent(
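
Continuing the made-up event from the sketch above, this is roughly how the raw payload is assembled before deserialization: the selector occupies keys[0], the key fields follow it, and the remaining fields sit in data. All hex values here are invented for illustration; the real code hands raw_data to starknet.py's DeserializationContext together with the reordered serializers:

# Invented RPC payload: keys = [selector, to, amount], data = [from_, memo].
matched_event_keys = ['0x99cd8bde55', '0xabc', '0x2']  # first element is the event selector (truncated, made up)
matched_event_data = ['0xdef', '0x0']

# Drop the selector and concatenate the remaining keys with data, converting hex felts to ints,
# so the values line up with the reordered serializers: to, amount, from_, memo.
raw_data = [int(x, 16) for x in matched_event_keys[1:] + matched_event_data]
print(raw_data)  # [2748, 2, 3567, 0]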
