-
Notifications
You must be signed in to change notification settings - Fork 535
/
Copy pathtest_transport.py
377 lines (293 loc) · 11.4 KB
/
test_transport.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
# coding: utf-8
import logging
import pickle
import gzip
import io
from datetime import datetime, timedelta
import pytest
from collections import namedtuple
from werkzeug.wrappers import Request, Response
from pytest_localserver.http import WSGIServer
from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope
from sentry_sdk._compat import datetime_utcnow
from sentry_sdk.transport import _parse_rate_limits
from sentry_sdk.envelope import Envelope, parse_json
from sentry_sdk.integrations.logging import LoggingIntegration
# One request recorded by CapturingServer: the URL path, the decoded JSON
# event (store endpoint) or parsed envelope (envelope endpoint), and whether
# the request body arrived gzip-compressed.
CapturedData = namedtuple("CapturedData", "path event envelope compressed")
class CapturingServer(WSGIServer):
    """In-process WSGI server that records every request it receives.

    Each request is decoded (gunzipped if necessary), parsed as either a
    plain JSON event or an envelope, and appended to ``self.captured`` as a
    ``CapturedData`` tuple so tests can assert on what the SDK sent.
    """

    def __init__(self, host="127.0.0.1", port=0, ssl_context=None):
        WSGIServer.__init__(self, host, port, self, ssl_context=ssl_context)
        self.code = 204
        self.headers = {}
        self.captured = []

    def respond_with(self, code=200, headers=None):
        """Configure status code (and optionally headers) for future responses."""
        self.code = code
        if headers:
            self.headers = headers

    def clear_captured(self):
        """Forget all previously captured requests."""
        del self.captured[:]

    def __call__(self, environ, start_response):
        """
        This is the WSGI application.
        """
        request = Request(environ)
        event = envelope = None

        compressed = request.headers.get("content-encoding") == "gzip"
        if compressed:
            body = gzip.GzipFile(fileobj=io.BytesIO(request.data))
        else:
            body = io.BytesIO(request.data)

        # Plain JSON means the legacy store endpoint; anything else is an envelope.
        if request.mimetype == "application/json":
            event = parse_json(body.read())
        else:
            envelope = Envelope.deserialize_from(body)

        self.captured.append(
            CapturedData(
                path=request.path,
                event=event,
                envelope=envelope,
                compressed=compressed,
            )
        )

        response = Response(status=self.code)
        response.headers.extend(self.headers)
        return response(environ, start_response)
@pytest.fixture
def capturing_server():
    """Run a CapturingServer for the duration of the test, then stop it."""
    server = CapturingServer()
    server.start()
    yield server
    server.stop()
@pytest.fixture
def make_client(request, capturing_server):
    """Return a factory producing Clients whose DSN points at the test server."""

    def inner(**kwargs):
        # Reuse the capturing server's host:port in the DSN; project id is 132.
        host = capturing_server.url[len("http://") :]
        dsn = "http://foobar@{}/132".format(host)
        return Client(dsn, **kwargs)

    return inner
@pytest.mark.forked
@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("client_flush_method", ["close", "flush"])
@pytest.mark.parametrize("use_pickle", (True, False))
@pytest.mark.parametrize("compressionlevel", (0, 9))
def test_transport_works(
    capturing_server,
    request,
    capsys,
    caplog,
    debug,
    make_client,
    client_flush_method,
    use_pickle,
    compressionlevel,
    maybe_monkeypatched_threading,
):
    """End-to-end: captured messages reach the server across debug/pickle/
    compression/flush-method combinations without printing to stdout/stderr."""
    caplog.set_level(logging.DEBUG)

    experiments = {"transport_zlib_compression_level": compressionlevel}
    client = make_client(debug=debug, _experiments=experiments)

    # A pickle round-trip must produce a fully working client.
    if use_pickle:
        client = pickle.loads(pickle.dumps(client))

    Hub.current.bind_client(client)
    request.addfinalizer(lambda: Hub.current.bind_client(None))

    add_breadcrumb(level="info", message="i like bread", timestamp=datetime_utcnow())
    capture_message("löl")

    getattr(client, client_flush_method)()

    out, err = capsys.readouterr()
    assert not err and not out

    assert capturing_server.captured
    # Compression level 0 means the payload goes out uncompressed.
    assert capturing_server.captured[0].compressed == (compressionlevel > 0)

    # Debug mode (and only debug mode) logs the outgoing event.
    assert any("Sending event" in record.msg for record in caplog.records) == debug
@pytest.mark.parametrize(
    "num_pools,expected_num_pools",
    (
        (None, 2),
        (2, 2),
        (10, 10),
    ),
)
def test_transport_num_pools(make_client, num_pools, expected_num_pools):
    """The transport_num_pools experiment is honored; the default is 2 pools."""
    experiments = {} if num_pools is None else {"transport_num_pools": num_pools}
    client = make_client(_experiments=experiments)

    options = client.transport._get_pool_options([])
    assert options["num_pools"] == expected_num_pools
def test_transport_infinite_loop(capturing_server, request, make_client):
    """The transport's own debug logging must not feed back into event capture."""
    client = make_client(
        debug=True,
        # Make sure we cannot create events from our own logging
        integrations=[LoggingIntegration(event_level=logging.DEBUG)],
    )

    with Hub(client):
        capture_message("hi")
        client.flush()

    # Exactly the one explicit message — no recursively generated events.
    assert len(capturing_server.captured) == 1
# Fixed reference time so the expected deadlines below are deterministic.
NOW = datetime(2014, 6, 2)


@pytest.mark.parametrize(
    # Renamed from "input" to avoid shadowing the builtin.
    "header,expected",
    [
        # Invalid rate limits
        ("", {}),
        ("invalid", {}),
        (",,,", {}),
        (
            "42::organization, invalid, 4711:foobar;transaction;security:project",
            {
                None: NOW + timedelta(seconds=42),
                "transaction": NOW + timedelta(seconds=4711),
                "security": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
            },
        ),
        (
            "4711:foobar;;transaction:organization",
            {
                "transaction": NOW + timedelta(seconds=4711),
                # Unknown data categories
                "foobar": NOW + timedelta(seconds=4711),
                "": NOW + timedelta(seconds=4711),
            },
        ),
    ],
)
def test_parse_rate_limits(header, expected):
    """_parse_rate_limits maps each category in an X-Sentry-Rate-Limits header
    to its retry deadline; a missing category (key None) limits everything."""
    assert dict(_parse_rate_limits(header, now=NOW)) == expected
def test_simple_rate_limits(capturing_server, capsys, caplog, make_client):
    """A 429 with Retry-After disables sending for every category until the
    deadline: follow-up events are dropped without hitting the server."""
    client = make_client()
    capturing_server.respond_with(code=429, headers={"Retry-After": "4"})

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    # A bare Retry-After applies to all categories, keyed as None.
    assert set(client.transport._disabled_until) == {None}

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "event"})
    client.flush()

    assert not capturing_server.captured
@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits(
    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
):
    """A category-scoped rate limit drops only that category; other event
    types still go out, and each drop is recorded as a lost-event outcome."""
    client = make_client(send_client_reports=False)

    captured_outcomes = []

    def record_lost_event(reason, data_category=None, item=None):
        # Mirror the transport's outcome hook so we can assert on the drops.
        if data_category is None:
            data_category = item.data_category
        return captured_outcomes.append((reason, data_category))

    monkeypatch.setattr(client.transport, "record_lost_event", record_lost_event)

    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711:transaction:organization"},
    )

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    # Only the "transaction" category is disabled.
    assert set(client.transport._disabled_until) == {"transaction"}

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    client.flush()

    assert not capturing_server.captured

    # Error events are unaffected and still reach the store endpoint.
    client.capture_event({"type": "event"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/store/"

    assert captured_outcomes == [
        ("ratelimit_backoff", "transaction"),
        ("ratelimit_backoff", "transaction"),
    ]
@pytest.mark.parametrize("response_code", [200, 429])
def test_data_category_limits_reporting(
    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
):
    """Rate-limited events are reported as discarded via client reports once
    client-report sending is (re-)enabled."""
    client = make_client(send_client_reports=True)

    capturing_server.respond_with(
        code=response_code,
        headers={
            "X-Sentry-Rate-Limits": "4711:transaction:organization, 4711:attachment:organization"
        },
    )

    # Suppress client reports until the test flips this flag, so we control
    # exactly when the accumulated outcomes go out.
    outcomes_enabled = False
    real_fetch = client.transport._fetch_pending_client_report

    def intercepting_fetch(*args, **kwargs):
        if outcomes_enabled:
            return real_fetch(*args, **kwargs)

    monkeypatch.setattr(
        client.transport, "_fetch_pending_client_report", intercepting_fetch
    )
    # get rid of threading making things hard to track
    monkeypatch.setattr(client.transport._worker, "submit", lambda x: x() or True)

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    # Both categories named in the header are now disabled.
    assert set(client.transport._disabled_until) == {"attachment", "transaction"}

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    capturing_server.clear_captured()

    # flush out the events but don't flush the client reports
    client.flush()
    client.transport._last_client_report_sent = 0
    outcomes_enabled = True

    scope = Scope()
    scope.add_attachment(bytes=b"Hello World", filename="hello.txt")
    client.capture_event({"type": "error"}, scope=scope)
    client.flush()

    # this goes out with an extra envelope because it's flushed after the last item
    # that is normally in the queue. This is quite funny in a way because it means
    # that the envelope that caused its own over quota report (an error with an
    # attachment) will include its outcome since it's pending.
    assert len(capturing_server.captured) == 1
    envelope = capturing_server.captured[0].envelope
    assert envelope.items[0].type == "event"
    assert envelope.items[1].type == "client_report"
    report = parse_json(envelope.items[1].get_bytes())
    assert sorted(report["discarded_events"], key=lambda x: x["quantity"]) == [
        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 2},
        {"category": "attachment", "reason": "ratelimit_backoff", "quantity": 11},
    ]
    capturing_server.clear_captured()

    # here we sent a normal event
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "error", "release": "foo"})
    client.flush()

    assert len(capturing_server.captured) == 2
    event = capturing_server.captured[0].event
    assert event["type"] == "error"
    assert event["release"] == "foo"

    envelope = capturing_server.captured[1].envelope
    assert envelope.items[0].type == "client_report"
    report = parse_json(envelope.items[0].get_bytes())
    assert report["discarded_events"] == [
        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 1},
    ]
@pytest.mark.parametrize("response_code", [200, 429])
def test_complex_limits_without_data_category(
    capturing_server, capsys, caplog, response_code, make_client
):
    """A rate-limit entry with no category (``4711::organization``) disables
    sending across all categories, keyed as None."""
    client = make_client()
    capturing_server.respond_with(
        code=response_code,
        headers={"X-Sentry-Rate-Limits": "4711::organization"},
    )

    client.capture_event({"type": "transaction"})
    client.flush()

    assert len(capturing_server.captured) == 1
    assert capturing_server.captured[0].path == "/api/132/envelope/"
    capturing_server.clear_captured()

    # Category-less limit applies globally, keyed as None.
    assert set(client.transport._disabled_until) == {None}

    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "transaction"})
    client.capture_event({"type": "event"})
    client.flush()

    assert len(capturing_server.captured) == 0