@@ -8,13 +8,12 @@
 import requests
 
 from stats import Statistics
-from errors import ApiError, BatchError
+from errors import ApiError
 from utils import guess_timezone
 
 import options
 
 
-
logging_enabled = True
 import logging
 logger = logging.getLogger('analytics')
@@ -65,29 +64,32 @@ def request(client, url, data):
     try:
 
         response = requests.post(url, data=json.dumps(data),
-            headers={'content-type': 'application/json'})
+            headers={'content-type': 'application/json'}, timeout=client.timeout)
 
         log('debug', 'Finished Segment.io request.')
 
         package_response(client, data, response)
 
+        return response.status_code == 200
+
     except requests.ConnectionError as e:
         package_exception(client, data, e)
+    except requests.Timeout as e:
+        package_exception(client, data, e)
+
+    return False
 
 
 class FlushThread(threading.Thread):
 
-    def __init__(self, client, url, batches):
+    def __init__(self, client):
         threading.Thread.__init__(self)
         self.client = client
-        self.url = url
-        self.batches = batches
 
     def run(self):
         log('debug', 'Flushing thread running ...')
 
-        for data in self.batches:
-            request(self.client, self.url, data)
+        self.client._sync_flush()
 
         log('debug', 'Flushing thread done.')
 
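Note on the hunk above: it bounds the POST with the standard `requests` `timeout` keyword, reports success as a boolean keyed on HTTP 200, and routes `requests.Timeout` through the same `package_exception` path as `requests.ConnectionError`. A minimal standalone sketch of that pattern (the function name and arguments are illustrative, not from this commit):

```python
import json

import requests


def post_with_timeout(url, data, timeout=10):
    # Bounded POST mirroring the hunk above: True on HTTP 200,
    # False on connection errors, timeouts, or any non-200 status.
    try:
        response = requests.post(url, data=json.dumps(data),
            headers={'content-type': 'application/json'}, timeout=timeout)
        return response.status_code == 200
    except (requests.ConnectionError, requests.Timeout):
        return False
```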
@@ -102,7 +104,8 @@ def __init__(self, secret=None,
                 log_level=logging.INFO, log=True,
                 flush_at=20, flush_after=datetime.timedelta(0, 10),
                 async=True, max_queue_size=10000,
-                stats=Statistics()):
+                stats=Statistics(),
+                timeout=10):
         """Create a new instance of an analytics-python Client
 
         :param str secret: The Segment.io API secret
@@ -117,6 +120,7 @@ def __init__(self, secret=None,
         :param bool async: True to have the client flush to the server on another
         thread, therefore not blocking code (this is the default). False to
         enable blocking and making the request on the calling thread.
+        :param float timeout: Number of seconds before timing out the request to Segment.io
 
         """
 
@@ -140,6 +144,8 @@ def __init__(self, secret=None,
         self.flush_at = flush_at
         self.flush_after = flush_after
 
+        self.timeout = timeout
+
         self.stats = stats
 
         self.flush_lock = threading.Lock()
@@ -321,7 +327,7 @@ def _enqueue(self, action):
             submitted = True
 
         else:
-            log('warn', 'Segment.io queue is full')
+            log('warn', 'analytics-python queue is full')
 
         if self._should_flush():
             self.flush()
@@ -350,8 +356,6 @@ def flush(self, async=None):
 
         flushing = False
 
-        url = options.host + options.endpoints['batch']
-
         # if the async parameter is provided, it overrides the client's settings
         if async == None:
             async = self.async
@@ -363,54 +367,52 @@ def flush(self, async=None):
 
             if self._flush_thread_is_free():
 
-                log('debug', 'Attempting asynchronous flush ...')
-
-                batches = self._get_batches()
-                if len(batches) > 0:
-
-                    self.flushing_thread = FlushThread(self,
-                        url, batches)
+                log('debug', 'Initiating asynchronous flush ..')
 
-                    self.flushing_thread.start()
+                self.flushing_thread = FlushThread(self)
+                self.flushing_thread.start()
 
-                    flushing = True
+                flushing = True
 
             else:
-                log('debug', 'The flushing thread is still active, ' +
-                    'cant flush right now')
+                log('debug', 'The flushing thread is still active.')
 
         else:
 
             # Flushes on this thread
-            log('debug', 'Starting synchronous flush ...')
-
-            batches = self._get_batches()
-            if len(batches) > 0:
-                for data in batches:
-                    request(self, url, data)
-                flushing = True
-
-            log('debug', 'Finished synchronous flush.')
+            log('debug', 'Initiating synchronous flush ..')
+            self._sync_flush()
+            flushing = True
 
         if flushing:
             self.last_flushed = datetime.datetime.now()
             self.stats.flushes += 1
 
         return flushing
 
-    def _get_batches(self):
+    def _sync_flush(self):
+
+        log('debug', 'Starting flush ..')
+
+        successful = 0
+        failed = 0
 
-        batches = []
+        url = options.host + options.endpoints['batch']
 
         while len(self.queue) > 0:
+
             batch = []
             for i in range(self.max_flush_size):
                 if len(self.queue) == 0:
                     break
+
                 batch.append(self.queue.pop())
 
-            batches.append({
-                'batch': batch,
-                'secret': self.secret
-            })
+            payload = {'batch': batch, 'secret': self.secret}
+
+            if request(self, url, payload):
+                successful += len(batch)
+            else:
+                failed += len(batch)
 
-        return batches
+        log('debug', 'Successfully flushed ' + str(successful) + ' items [' +
+            str(failed) + ' failed].')
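Taken together, the commit threads a configurable `timeout` from the `Client` constructor down to every batch POST, and replaces the pre-built batch list with an incremental `_sync_flush()` that drains the queue and tallies per-item success. A hedged usage sketch follows; the `Client` import path and the secret are placeholders not shown in this diff, and the code predates Python 3.7, where `async` became a reserved word:

```python
import datetime

from analytics.client import Client  # assumed import path, not shown in this diff

client = Client(
    secret='MY_SEGMENTIO_SECRET',           # placeholder credential
    flush_at=20,                            # flush once 20 items are queued ...
    flush_after=datetime.timedelta(0, 10),  # ... or after 10 seconds
    async=True,                             # flush on a background FlushThread
    timeout=10,                             # new: seconds before each POST times out
)

# ... identify/track calls enqueue items here ...

# Force a blocking flush: flush(async=False) runs _sync_flush() on the
# calling thread; each batch request now returns True only on HTTP 200,
# and requests.Timeout is packaged instead of aborting the flush.
client.flush(async=False)
```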