@@ -9,11 +9,12 @@ import (
 	"strconv"
 	"sync"
 	"testing"
+	"time"
 )
 
-///////////////////////////////////////////////////
-// Tests
-///////////////////////////////////////////////////
+/*
+ Tests
+*/
 func TestLoader(t *testing.T) {
 	t.Run("test Load method", func(t *testing.T) {
 		t.Parallel()
@@ -328,6 +329,7 @@ func TestLoader(t *testing.T) {
 		t.Parallel()
 		identityLoader, loadCalls := IDLoader[string](0)
 		ctx := context.Background()
+		start := time.Now()
 		future1 := identityLoader.Load(ctx, "1")
 		future2 := identityLoader.Load(ctx, "1")
 
@@ -340,6 +342,12 @@ func TestLoader(t *testing.T) {
 			t.Error(err.Error())
 		}
 
+		// also check that it took the full timeout to return
+		var duration = time.Since(start)
+		if duration < 16*time.Millisecond {
+			t.Errorf("took %v when expected it to take more than 16 ms because of wait", duration)
+		}
+
 		calls := *loadCalls
 		inner := []string{"1"}
 		expected := [][]string{inner}
@@ -348,6 +356,45 @@ func TestLoader(t *testing.T) {
 		}
 	})
 
+	t.Run("doesn't wait for timeout if Flush() is called", func(t *testing.T) {
+		t.Parallel()
+		identityLoader, loadCalls := IDLoader[string](0)
+		ctx := context.Background()
+		start := time.Now()
+		future1 := identityLoader.Load(ctx, "1")
+		future2 := identityLoader.Load(ctx, "2")
+
+		// trigger them to be fetched immediately vs waiting for the 16 ms timer
+		identityLoader.Flush()
+
+		_, err := future1()
+		if err != nil {
+			t.Error(err.Error())
+		}
+		_, err = future2()
+		if err != nil {
+			t.Error(err.Error())
+		}
+
+		var duration = time.Since(start)
+		if duration > 2*time.Millisecond {
+			t.Errorf("took %v when expected it to take less than 2 ms b/c we called Flush()", duration)
+		}
+
+		calls := *loadCalls
+		inner := []string{"1", "2"}
+		expected := [][]string{inner}
+		if !reflect.DeepEqual(calls, expected) {
+			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
+		}
+	})
+
+	t.Run("Nothing for Flush() to do on empty loader with current batch", func(t *testing.T) {
+		t.Parallel()
+		identityLoader, _ := IDLoader[string](0)
+		identityLoader.Flush()
+	})
+
 	t.Run("allows primed cache", func(t *testing.T) {
 		t.Parallel()
 		identityLoader, loadCalls := IDLoader[string](0)
@@ -741,9 +788,9 @@ func FaultyLoader[K comparable]() (*Loader[K, K], *[][]K) {
 	return loader, &loadCalls
 }
 
-///////////////////////////////////////////////////
-// Benchmarks
-///////////////////////////////////////////////////
+/*
+ Benchmarks
+*/
 var a = &Avg{}
 
 func batchIdentity[K comparable](_ context.Context, keys []K) (results []*Result[K]) {
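
The diff only shows the tests, not the loader itself. As context for what the new tests exercise, here is a minimal sketch of how a Flush() method on a batching loader like this one might work; the batchLoader, batch, fetch, and wait names are illustrative assumptions rather than the library's real types. The behavior the tests pin down is that Flush() dispatches the currently collected batch immediately instead of waiting out the 16 ms timer, and is a no-op when nothing is queued.

// Sketch only: assumed types and field names, not the library's implementation.
package sketch

import (
	"sync"
	"time"
)

type batch[K comparable, V any] struct {
	keys []K
	vals []V
	done chan struct{} // closed once vals has been populated
	once sync.Once     // the batch runs exactly once (timer or Flush)
}

type batchLoader[K comparable, V any] struct {
	fetch func(keys []K) []V // user-supplied batch function
	wait  time.Duration      // e.g. 16 * time.Millisecond

	mu  sync.Mutex
	cur *batch[K, V] // batch currently collecting keys; nil if none
}

// Load queues a key on the current batch and returns a thunk that blocks
// until that batch has been fetched.
func (l *batchLoader[K, V]) Load(key K) func() V {
	l.mu.Lock()
	if l.cur == nil {
		b := &batch[K, V]{done: make(chan struct{})}
		l.cur = b
		// run the batch after the wait window unless Flush() beats the timer
		time.AfterFunc(l.wait, func() { l.run(b) })
	}
	b := l.cur
	idx := len(b.keys)
	b.keys = append(b.keys, key)
	l.mu.Unlock()

	return func() V {
		<-b.done
		return b.vals[idx]
	}
}

// Flush dispatches the pending batch immediately instead of waiting for the
// timer; it does nothing when no keys have been queued.
func (l *batchLoader[K, V]) Flush() {
	l.mu.Lock()
	b := l.cur
	l.mu.Unlock()
	if b != nil {
		l.run(b)
	}
}

// run executes a batch exactly once and detaches it from the loader so that
// later Loads start a fresh batch.
func (l *batchLoader[K, V]) run(b *batch[K, V]) {
	b.once.Do(func() {
		l.mu.Lock()
		if l.cur == b {
			l.cur = nil
		}
		keys := b.keys
		l.mu.Unlock()

		b.vals = l.fetch(keys)
		close(b.done)
	})
}

Under these assumptions the two new tests follow directly: calling Flush() after two Loads returns well before the 16 ms wait elapses, and calling it on a loader with no pending batch simply returns.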