from Standard.Base import all
import Standard.Base.Data.Base_64.Base_64
import Standard.Base.Errors.Common.Response_Too_Large
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Network.HTTP.Cache_Policy.Cache_Policy
import Standard.Base.Network.HTTP.Request.Request
import Standard.Base.Network.HTTP.Request_Body.Request_Body
import Standard.Base.Network.HTTP.Response.Response
import Standard.Base.Runtime.Context
import Standard.Base.Runtime.Ref.Ref

from Standard.Table import all
import Standard.Table.Errors.Invalid_JSON_Format

from Standard.Test import all
import Standard.Test.Test_Environment

import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
from enso_dev.Base_Tests.Network.Http.Http_Test_Setup import base_url_with_slash, pending_has_url

import project.Util

polyglot java import java.io.File as Java_File
polyglot java import java.lang.IllegalArgumentException
polyglot java import java.lang.NumberFormatException
polyglot java import org.enso.base.cache.DiskSpaceGetter
polyglot java import org.enso.base.cache.LRUCache
polyglot java import org.enso.base.cache.LRUCacheSettings
polyglot java import org.enso.base.cache.NowGetter
polyglot java import org.enso.base.cache.TotalCacheLimit
polyglot java import org.enso.base.enso_cloud.EnsoSecretHelper

main filter=Nothing =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter filter

add_specs suite_builder =
suite_builder.group "fetching files using HTTP" pending=pending_has_url group_builder->
group_builder.specify "fetching json" <| Test.with_retries <|
r = Data.fetch base_url_with_slash+"testfiles/table.json"
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
r.to Table . should_equal expected_table
group_builder.specify "fetching csv" <| Test.with_retries <|
url = base_url_with_slash+"testfiles/table.csv"
r = Data.fetch url
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
r.should_be_a Table
r.should_equal expected_table
r2 = url.to_uri.fetch
r2.should_be_a Table
r2.should_equal expected_table
group_builder.specify "fetching xls" <| Test.with_retries <|
url = base_url_with_slash+"testfiles/table.xls"
r = Data.fetch url
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
r.should_be_a Excel_Workbook
r.sheet_names . should_equal ["MyTestSheet"]
r.read "MyTestSheet" . should_equal expected_table
r2 = Data.fetch url format=Raw_Response . decode (..Sheet "MyTestSheet")
r2.should_be_a Table
r2.should_equal expected_table
group_builder.specify "fetching xlsx" <| Test.with_retries <|
url = base_url_with_slash+"testfiles/table.xlsx"
r = Data.fetch url
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
r.should_be_a Excel_Workbook
r.sheet_names . should_equal ["MyTestSheet"]
r.read "MyTestSheet" . should_equal expected_table
r2 = Data.fetch url format=Raw_Response . decode (..Sheet "MyTestSheet")
r2.should_be_a Table
r2.should_equal expected_table
r3 = url.to_uri.fetch format=Raw_Response . decode (..Sheet "MyTestSheet")
r3.should_be_a Table
r3.should_equal expected_table
group_builder.specify "format detection based on Content-Type and Content-Disposition" <| Test.with_retries <|
content = 'A,B\n1,x\n3,y'
uri = URI.from (base_url_with_slash+"test_headers")
. add_query_argument "base64_response_data" (Base_64.encode_text content)
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
r0 = uri.fetch
# No automatic parsing, because no content type information is specified.
r0.should_be_a Response
r0.content_type . should_equal Nothing
r0.get_header "Content-Disposition" . should_equal Nothing
r1 = (uri.add_query_argument "Content-Type" "text/csv").fetch
r1.should_equal expected_table
r2 = (uri.add_query_argument "Content-Disposition" 'attachment; filename="my_table.csv"').fetch
r2.should_equal expected_table
# If the disposition suggest a text file, we will parse as text:
r3 = (uri.add_query_argument "Content-Disposition" 'attachment; filename="text.txt"').fetch
r3.should_be_a Text
r3.should_equal content
# Reinterpreting as TSV:
r4 = (uri.add_query_argument "Content-Type" "text/tab-separated-values").fetch
r4.should_equal (Table.from_rows ["Column 1"] [["A,B"], ["1,x"], ["3,y"]])
suite_builder.group "Response caching" pending=pending_has_url group_builder->
        with_temp_file file ~action =
            deleter =
                file.delete_if_exists
            Panic.with_finalizer deleter <|
                deleter
                action
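
        # Accessors for the shared response cache used by fetch requests.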
        get_lru_cache =
            EnsoSecretHelper.getOrCreateCache.getLRUCache

        get_num_response_cache_entries =
            get_lru_cache.getNumEntries
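
        # Helpers to observe and assert on the cache entry counts before and
        # after running an action.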
        with_counts ~action =
            before_count = get_num_response_cache_entries
            action
            after_count = get_num_response_cache_entries
            [before_count, after_count]

        expect_counts expected_counts ~action =
            counts = with_counts action
            counts . should_equal expected_counts frames_to_skip=1
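
        # The files currently backing the cache, and their sizes, sorted.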
        get_cache_files : Vector Text
        get_cache_files -> Vector Text =
            Vector.from_polyglot_array EnsoSecretHelper.getOrCreateCache.getLRUCache.getFiles . sort Sort_Direction.Ascending

        get_cache_file_sizes : Vector Integer
        get_cache_file_sizes -> Vector Integer =
            Vector.from_polyglot_array EnsoSecretHelper.getOrCreateCache.getLRUCache.getFileSizes . sort Sort_Direction.Ascending

        # Creates a new cache each time, then resets it at the end
        with_lru_cache lru_cache ~action =
            reset = EnsoSecretHelper.getOrCreateCache.setLRUCache LRUCache.new
            Panic.with_finalizer reset <|
                EnsoSecretHelper.getOrCreateCache.setLRUCache lru_cache
                action

        # Creates a new cache each time, then resets it at the end
        with_config max_file_size total_cache_size ~action =
            now_getter = NowGetter.new
            disk_space_getter = DiskSpaceGetter.new
            lru_cache_settings = LRUCacheSettings.new max_file_size total_cache_size
            lru_cache = LRUCache.new lru_cache_settings now_getter disk_space_getter
            with_lru_cache lru_cache (action now_getter disk_space_getter)

        # Creates a new cache each time, then resets it at the end
        with_mocks ~action =
            now_getter = NowGetter.new
            disk_space_getter = DiskSpaceGetter.new
            lru_cache = LRUCache.new LRUCacheSettings.getDefault now_getter disk_space_getter
            with_lru_cache lru_cache (action now_getter disk_space_getter)

        # Creates a new cache each time, then resets it at the end
        with_default_cache ~action =
            lru_cache = LRUCache.new
            with_lru_cache lru_cache action
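
        # Test-only hook that makes the cache behave as if the project was reloaded.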
        fake_reload =
            EnsoSecretHelper.getOrCreateCache.getLRUCache.simulateReloadTestOnly
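
        ## Test endpoints: `max-age` controls the Cache-Control TTL and `length`
           the response size, so each distinct URL yields its own cache entry.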
        url0 = base_url_with_slash+'test_download?max-age=16&length=10'
        url1 = base_url_with_slash+'test_download?max-age=16&length=20'
        url_post = base_url_with_slash + "post"
        headers0 = [Header.new "A-Header" "a-header-value", Header.new "A-Header" "a-header-value"]
        headers1 = [Header.new "A-Header" "a-different-header-value", Header.new "A-Header" "a-header-value"]

        # Run the request(s) twice and confirm the results are the same
        check_same_results ~action =
            results = 0.up_to 2 . map (_-> action)
            results.distinct.length . should_equal 1
group_builder.specify "Cache should return the same repsonse" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
check_same_results <|
HTTP.fetch url0 . decode_as_text
get_num_response_cache_entries . should_equal 1
check_same_results <|
HTTP.fetch url1 . decode_as_text
get_num_response_cache_entries . should_equal 2
with_default_cache <|
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text
url1_body_1 = HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text
HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text . should_equal url1_body_1
get_num_response_cache_entries . should_equal 2
with_default_cache <|
url0_body_2 = HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text
HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url0_body_2
url1_body_2 = HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text
HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url1_body_2
get_num_response_cache_entries . should_equal 0
group_builder.specify "Cache should handle many entries" pending=pending_has_url <| Test.with_retries <|
count = 20
with_default_cache <|
check_same_results <|
0.up_to count . map i->
HTTP.fetch base_url_with_slash+"test_download?length="+i.to_text . decode_as_text
get_num_response_cache_entries . should_equal count
with_default_cache <|
check_same_results <|
0.up_to count . each i->
headers = [Header.new "A-Header" "a-header-value-"+i.to_text]
HTTP.fetch base_url_with_slash+"test_download?length=8" headers=headers . decode_as_text
get_num_response_cache_entries . should_equal count
group_builder.specify "Cache policy should work for HTTP.fetch" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
expect_counts [0, 0] <|
HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache
HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache
expect_counts [0, 2] <|
HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache
HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache
with_default_cache <|
expect_counts [0, 2] <|
HTTP.fetch url0
HTTP.fetch url1
group_builder.specify "Cache policy should work for Data.fetch" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
expect_counts [0, 0] <|
Data.fetch url0 cache_policy=Cache_Policy.No_Cache
Data.fetch url1 cache_policy=Cache_Policy.No_Cache
expect_counts [0, 2] <|
Data.fetch url0 cache_policy=Cache_Policy.Use_Cache
Data.fetch url1 cache_policy=Cache_Policy.Use_Cache
with_default_cache <|
expect_counts [0, 2] <|
Data.fetch url0
Data.fetch url1
group_builder.specify "Should not cache Data.download" pending=pending_has_url <| Test.with_retries <|
target_file = enso_project.data / "transient" / "my_download0.txt"
with_temp_file target_file <| with_default_cache <|
Data.download url0 target_file
get_num_response_cache_entries . should_equal 0
group_builder.specify "Data.download is not affected by caching limits" pending=pending_has_url <| Test.with_retries <|
target_file = enso_project.data / "transient" / "my_download0.txt"
with_temp_file target_file <| with_config 100 (TotalCacheLimit.Bytes.new 120) _-> _->
url = base_url_with_slash+"test_download?length=200"
Data.download url target_file
target_file.read.length . should_equal 200
Data.fetch url . should_fail_with (Response_Too_Large.Error 100)
group_builder.specify "Should not cache for methods other than GET" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
expect_counts [0, 0] <|
Data.post url_post (Request_Body.Text "hello world")
group_builder.specify "HTTP request with a non-GET method should reject a cache_policy=Use_Cache argument" pending=pending_has_url <| Test.with_retries <|
request = Request.new HTTP_Method.Post url_post [] Request_Body.Empty
HTTP.new.request request cache_policy=Cache_Policy.Use_Cache . should_fail_with Illegal_Argument
group_builder.specify "HTTP request with a non-GET method should not reject a cache_policy=No_Cache argument" pending=pending_has_url <| Test.with_retries <|
request = Request.new HTTP_Method.Post url_post [] Request_Body.Empty
HTTP.new.request request cache_policy=Cache_Policy.No_Cache . should_succeed
group_builder.specify "Should be able to clear caches" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
Data.fetch url0
get_num_response_cache_entries . should_equal 1
with_default_cache <|
get_num_response_cache_entries . should_equal 0
group_builder.specify "Cache key should depend on the headers" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
expect_counts [0, 2] <|
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers1
Data.fetch url0 headers=headers1
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers0
Data.fetch url0 headers=headers1
group_builder.specify "Cache key should not depend on header order" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
header0 = Header.new "Abc" "eef"
header1 = Header.new "Abc" "def"
header2 = Header.new "Ghi" "jkl"
orders = [[header0, header1, header2], [header1, header2, header0], [header2, header1, header0]]
responses = orders.map headers->
Data.fetch url0 headers=headers . decode_as_text
get_num_response_cache_entries . should_equal 1
responses.distinct.length . should_equal 1
## Fetching the trigger uri causes stale entries to be removed, since the
uri is always different and so the caching and cleanup logic is run.
fake_now = Date_Time.now
trigger_uri_serial = Ref.new 0
make_trigger_uri =
serial = trigger_uri_serial.get
trigger_uri_serial.modify (_ + 1)
base_url_with_slash+'test_download?max-age=10000&length=50&abc='+serial.to_text
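
        ## Advances the mocked clock by `advance_secs`, fetches a fresh trigger
           uri to run the cleanup logic, and returns the resulting entry count.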
        set_time_and_get_count now_mock advance_secs =
            now_mock.mocked (fake_now + (Duration.new seconds=advance_secs))
            trigger_uri = make_trigger_uri
            Data.fetch trigger_uri
            get_num_response_cache_entries

        group_builder.specify "The cache should expire stale entries" pending=pending_has_url <| Test.with_retries <|
            with_mocks now-> _->
                set_time_and_get_count now 0 # Initialize fake now.
                Data.fetch base_url_with_slash+'test_download?max-age=100&length=50'
                Data.fetch base_url_with_slash+'test_download?max-age=200&length=50'
                Data.fetch base_url_with_slash+'test_download?max-age=200&length=51'
                Data.fetch base_url_with_slash+'test_download?max-age=300&length=50'
                ## The count will increase by 1 each time, but decrease by the
                   number of entries removed
                set_time_and_get_count now 0 . should_equal 6
                set_time_and_get_count now 90 . should_equal 7
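                # At t=110 the max-age=100 entry has expired: 7 + 1 (trigger) - 1 (expired) = 7.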
                set_time_and_get_count now 110 . should_equal 7
                set_time_and_get_count now 190 . should_equal 8
                set_time_and_get_count now 202 . should_equal 7
                set_time_and_get_count now 292 . should_equal 8
                set_time_and_get_count now 301 . should_equal 8
group_builder.specify "The cache should use the Age response header" pending=pending_has_url <| Test.with_retries <|
with_mocks now-> _->
set_time_and_get_count now 0 # Initialize fake now.
Data.fetch base_url_with_slash+'test_download?max-age=100&age=50&length=50' # ttl 50
Data.fetch base_url_with_slash+'test_download?max-age=100&age=30&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=120&age=50&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=70&&length=50' # ttl 70
Data.fetch base_url_with_slash+'test_download?max-age=160&age=70&length=50' # ttl 90
## The count will increase by 1 each time, but decrease by the
number of entries removed
set_time_and_get_count now 0 . should_equal 7
set_time_and_get_count now 40 . should_equal 8
set_time_and_get_count now 51 . should_equal 8
set_time_and_get_count now 68 . should_equal 9
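                # At t=72 all three ttl-70 entries have expired: 9 + 1 (trigger) - 3 (expired) = 7.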
                set_time_and_get_count now 72 . should_equal 7
                set_time_and_get_count now 88 . should_equal 8
                set_time_and_get_count now 93 . should_equal 8
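
        # Fetches a test endpoint that returns a response of `size` bytes.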
        fetch_n size =
            Data.fetch base_url_with_slash+'test_download?length='+size.to_text

        group_builder.specify "Will remove old cache files to keep the total cache size under the total cache size limit" pending=pending_has_url <| Test.with_retries <|
            with_config 1000 (TotalCacheLimit.Bytes.new 100) _-> _->
                fetch_n 30
                fetch_n 50
                fetch_n 10
                get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50]
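                ## Fetching 20 would push the total over the 100-byte limit, so
                   the least recently used entry (30) is evicted; likewise below.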
                fetch_n 20
                get_cache_file_sizes . should_equal_ignoring_order [10, 20, 50]
                fetch_n 40
                get_cache_file_sizes . should_equal_ignoring_order [10, 20, 40]
                fetch_n 35
                get_cache_file_sizes . should_equal_ignoring_order [20, 35, 40]

        group_builder.specify "Will remove old cache files based on how recently they were used" pending=pending_has_url <| Test.with_retries <|
            with_config 1000 (TotalCacheLimit.Bytes.new 100) _-> _->
                fetch_n 30
                fetch_n 50
                fetch_n 10
                get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50]
                # Use 30 again so it's considered more recently used.
                fetch_n 30
                get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50]
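                ## 30 was used most recently, so 50 is now the least recently
                   used entry and is evicted first.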
                fetch_n 20
                get_cache_file_sizes . should_equal_ignoring_order [10, 20, 30]
                fetch_n 45
                get_cache_file_sizes . should_equal_ignoring_order [20, 30, 45]

        group_builder.specify "Will not cache a file with a content length greater than the single file limit" pending=pending_has_url <| Test.with_retries <|
            with_config 100 (TotalCacheLimit.Bytes.new 1000) _-> _->
                fetch_n 110 . should_fail_with (Response_Too_Large.Error 100)

        group_builder.specify "Will not cache a file without a content length, but which is greater than the single file limit" pending=pending_has_url <| Test.with_retries <|
            with_config 100 (TotalCacheLimit.Bytes.new 1000) _-> _->
                url = base_url_with_slash+'test_download?omit-content-length=1&length=110'
                Data.fetch url . should_fail_with (Response_Too_Large.Error 100)

        group_builder.specify "Will make room for the largest possible file, if the server does not provide a content-length" pending=pending_has_url <| Test.with_retries <|
            with_config 50 (TotalCacheLimit.Bytes.new 100) _-> _->
                fetch_n 20
                fetch_n 40
                fetch_n 10
                fetch_n 15
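                ## With no content-length, room must be made for a potential
                   max-file-size (50-byte) response, evicting the 20 and 40 entries.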
                url = base_url_with_slash+'test_download?omit-content-length=1&length=2'
                Data.fetch url . should_succeed
                get_cache_file_sizes . should_equal_ignoring_order [10, 15, 2]

        group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <|
            with_default_cache <|
                HTTP.fetch url0
                get_num_response_cache_entries . should_equal 1
                HTTP.fetch base_url_with_slash+'crash'
                get_num_response_cache_entries . should_equal 1
                HTTP.fetch base_url_with_slash+'nonexistent_endpoint'
                get_num_response_cache_entries . should_equal 1
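
        # The following tests create real Enso_Secrets, so they need the cloud test environment.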
        cloud_setup = Cloud_Tests_Setup.prepare

        group_builder.specify "Should work with secrets in the URI" pending=pending_has_url <| Test.with_retries <|
            cloud_setup.with_prepared_environment <|
                secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value"
                secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value"
                cleanup =
                    secret1.delete
                    secret2.delete
                with_default_cache <| Panic.with_finalizer cleanup <|
                    # Requests differ only in secrets in URI.
                    uri1 = URI.from 'https://httpbin.org/bytes/50'
                        . add_query_argument "arg1" secret1
                        . add_query_argument "arg2" "plain value"
                    uri2 = URI.from 'https://httpbin.org/bytes/50'
                        . add_query_argument "arg1" secret2
                        . add_query_argument "arg2" "plain value"
                    HTTP.fetch uri1
                    get_num_response_cache_entries . should_equal 1
                    HTTP.fetch uri2
                    get_num_response_cache_entries . should_equal 2
group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <|
cloud_setup.with_prepared_environment <|
secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value"
secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value"
cleanup =
secret1.delete
secret2.delete
with_default_cache <| Panic.with_finalizer cleanup <|
# Requests differ only in secrets in headers.
uri = URI.from 'https://httpbin.org/bytes/50'
headers1 = [Header.new "A-Header" secret1]
headers2 = [Header.new "A-Header" secret2]
HTTP.fetch headers=headers1 uri
get_num_response_cache_entries . should_equal 1
HTTP.fetch headers=headers2 uri
get_num_response_cache_entries . should_equal 2
group_builder.specify "Cache limits should have defaults" <|
lru_cache = LRUCache.new
lru_cache.getSettings.getMaxFileSize . should_equal (2 * 1024 * 1024 * 1024)
lru_cache.getSettings.getTotalCacheLimit.percentage . should_equal 0.2
group_builder.specify "Should be able to set the max file size and total cache size (in MB) via environment variable" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB" "8" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "30" <|
with_default_cache <|
EnsoSecretHelper.getOrCreateCache.getLRUCache.getSettings.getMaxFileSize . should_equal (8 * 1024 * 1024)
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal (30 * 1024 * 1024)
group_builder.specify "Should be able to set the max file size and total cache size (as a percentage) via environment variable, and track changes to available disk space" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB" "8" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "10%" <|
with_mocks _-> disk_space->
EnsoSecretHelper.getOrCreateCache.getLRUCache.getSettings.getMaxFileSize . should_equal (8 * 1024 * 1024)
disk_space.mocked 300
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 30
disk_space.mocked 400
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 40
group_builder.specify "Includes the existing cache files in the total cache size calculation, for a percentage total cache limit" pending=pending_has_url <| Test.with_retries <|
with_config 1000 (TotalCacheLimit.Percentage.new 0.5) _-> disk_space->
disk_space.mocked 100
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
fetch_n 30
disk_space.mocked 70
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
fetch_n 20
disk_space.mocked 50
get_num_response_cache_entries . should_equal 2
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
fetch_n 10
disk_space.mocked 70
get_num_response_cache_entries . should_equal 2
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 50
group_builder.specify "Total cache size, specified in MB, should not go over the percentage hard limit" <|
with_config 1000 (TotalCacheLimit.Bytes.new 200) _-> disk_space->
disk_space.mocked 100
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 90
group_builder.specify "Total cache size, specified as a percentage, should not go over the percentage hard limit" <|
with_config 1000 (TotalCacheLimit.Percentage.new 0.95) _-> disk_space->
disk_space.mocked 100
EnsoSecretHelper.getOrCreateCache.getLRUCache.getMaxTotalCacheSize . should_equal 90
group_builder.specify "Falls back to the default if an environment variable is incorrectly formatted" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_FILE_SIZE_MB" "abcd" <|
LRUCache.new . getSettings . getMaxFileSize . should_equal (2 * 1024 * 1024 * 1024)
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "50q.%" <|
LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)
group_builder.specify "Falls back to the default if the max total cache percentage is outside 0..100%" <|
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "-10%" <|
LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)
Test_Environment.unsafe_with_environment_override "ENSO_LIB_HTTP_CACHE_MAX_TOTAL_CACHE_LIMIT" "101%" <|
LRUCache.new . getSettings . getTotalCacheLimit . should_equal (TotalCacheLimit.Percentage.new 0.2)
group_builder.specify "Cache should be cleared when a reload is detected" <|
HTTP.fetch base_url_with_slash+'test_download?max-age=16&length=10'
HTTP.fetch base_url_with_slash+'test_download?max-age=16&length=11'
HTTP.fetch base_url_with_slash+'test_download?max-age=16&length=12'
get_num_response_cache_entries . should_equal 3
fake_reload
get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request
HTTP.fetch base_url_with_slash+'test_download?max-age=16&length=13'
get_num_response_cache_entries . should_equal 1
HTTP.fetch base_url_with_slash+'test_download?max-age=16&length=14'
HTTP.fetch base_url_with_slash+'test_download?max-age=16&length=15'
get_num_response_cache_entries . should_equal 3
fake_reload
get_num_response_cache_entries . should_equal 3 # Cleaning is not triggered until the next request
HTTP.fetch base_url_with_slash+'test_download?max-age=16&length=16'
get_num_response_cache_entries . should_equal 1
group_builder.specify "Reissues the request if the cache file disappears" pending=pending_has_url <| Test.with_retries <|
with_default_cache <|
url = base_url_with_slash+'test_download?max-age=16&length=10'
result0 = HTTP.fetch url . decode_as_text
result0.length . should_equal 10
HTTP.fetch url . decode_as_text . should_equal result0
get_num_response_cache_entries . should_equal 1
get_cache_files.map (f-> File.new f . delete)
result1 = HTTP.fetch url . decode_as_text
result1 . should_not_equal result0
result1.length . should_equal 10