@@ -494,6 +494,36 @@ def on_log_event(log_event):
             1, len(self.optimizely.notification_center.notification_listeners[enums.NotificationTypes.LOG_EVENT]),
         )
 
+    def test_warning_log_level_on_queue_overflow(self):
+        """ Test that a warning log is created when events overflow the queue. """
+
+        # create scenario where the batch size (MAX_BATCH_SIZE) is significantly larger than the queue size
+        # use smaller batch size and higher timeout to avoid test flakiness
+        test_max_queue_size = 10
+        self.MAX_BATCH_SIZE = 1000
+
+        event_dispatcher = CustomEventDispatcher()
+
+        with mock.patch.object(self.optimizely, 'logger') as mock_config_logging:
+            self.event_processor = BatchEventProcessor(
+                event_dispatcher,
+                self.optimizely.logger,
+                True,
+                queue.Queue(maxsize=test_max_queue_size),
+            )
+
+        for i in range(0, self.MAX_BATCH_SIZE):
+            user_event = self._build_conversion_event(self.event_name)
+            self.event_processor.process(user_event)
+            event_dispatcher.expect_conversion(self.event_name, self.test_user_id)
+
+        time.sleep(self.TEST_TIMEOUT)
+
+        # queue is flushed, even though events overflow
+        self.assertEqual(0, self.event_processor.event_queue.qsize())
+        mock_config_logging.warning.assert_called_with('Payload not accepted by the queue. Current size: {}'
+                                                       .format(str(test_max_queue_size)))
+
 
 class CustomForwardingEventDispatcher(object):
     def __init__(self, is_updated=False):
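
For context on what the added test exercises, here is a minimal sketch of a producer backed by a bounded queue.Queue that drops overflowing events and logs the warning asserted above. This is not the SDK's actual BatchEventProcessor implementation; the BoundedEventProcessor class name and its constructor are hypothetical and used only for illustration.

import logging
import queue


class BoundedEventProcessor(object):
    """Illustrative only: drops events once its bounded queue is full."""

    def __init__(self, logger=None, maxsize=10):
        self.logger = logger or logging.getLogger(__name__)
        self.event_queue = queue.Queue(maxsize=maxsize)

    def process(self, user_event):
        try:
            # Non-blocking put raises queue.Full once maxsize events are pending.
            self.event_queue.put_nowait(user_event)
        except queue.Full:
            # The overflowing event is discarded and a warning is logged,
            # matching the message the test asserts via mock_config_logging.
            self.logger.warning(
                'Payload not accepted by the queue. Current size: {}'.format(
                    str(self.event_queue.qsize())
                )
            )

In the test, a consumer thread eventually drains the queue, which is why the assertion expects qsize() to return 0 even though far more events were produced than the queue could hold at once.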