import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";

- import doShardedTagCache from "./do-sharded-tag-cache";
+ import doShardedTagCache, {
+   DEFAULT_HARD_REPLICAS,
+   DEFAULT_SOFT_REPLICAS,
+   TagCacheDOId,
+ } from "./do-sharded-tag-cache";

const hasBeenRevalidatedMock = vi.fn();
const writeTagsMock = vi.fn();
@@ -9,9 +13,15 @@ const getMock = vi
  .fn()
  .mockReturnValue({ hasBeenRevalidated: hasBeenRevalidatedMock, writeTags: writeTagsMock });
const waitUntilMock = vi.fn().mockImplementation(async (fn) => fn());
+ const sendDLQMock = vi.fn();
vi.mock("./cloudflare-context", () => ({
  getCloudflareContext: () => ({
-     env: { NEXT_CACHE_D1_SHARDED: { idFromName: idFromNameMock, get: getMock } },
+     env: {
+       NEXT_CACHE_DO_SHARDED: { idFromName: idFromNameMock, get: getMock },
+       NEXT_CACHE_DO_SHARDED_DLQ: {
+         send: sendDLQMock,
+       },
+     },
    ctx: { waitUntil: waitUntilMock },
  }),
}));
@@ -22,24 +32,81 @@ describe("DOShardedTagCache", () => {
  describe("generateShardId", () => {
    it("should generate a shardId", () => {
      const cache = doShardedTagCache();
-       const expectedResult = new Map();
-       expectedResult.set("shard-1", ["tag1"]);
-       expectedResult.set("shard-2", ["tag2"]);
-       expect(cache.generateShards(["tag1", "tag2"])).toEqual(expectedResult);
+       const expectedResult = [
+         { doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1"] },
+         { doId: expect.objectContaining({ shardId: "tag-hard;shard-2" }), tags: ["tag2"] },
+       ];
+       const result = cache.groupTagsByDO({ tags: ["tag1", "tag2"] });
+       expect(result).toEqual(expectedResult);
+       expect(result[0]?.doId.key).toBe("tag-hard;shard-1;replica-1");
+       expect(result[1]?.doId.key).toBe("tag-hard;shard-2;replica-1");
    });

    it("should group tags by shard", () => {
      const cache = doShardedTagCache();
-       const expectedResult = new Map();
-       expectedResult.set("shard-1", ["tag1", "tag6"]);
-       expect(cache.generateShards(["tag1", "tag6"])).toEqual(expectedResult);
+       const expectedResult = [
+         { doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1", "tag6"] },
+       ];
+       const result = cache.groupTagsByDO({ tags: ["tag1", "tag6"] });
+       expect(result).toEqual(expectedResult);
+       expect(result[0]?.doId.key).toBe("tag-hard;shard-1;replica-1");
    });

    it("should generate the same shardId for the same tag", () => {
      const cache = doShardedTagCache();
-       const firstResult = cache.generateShards(["tag1"]);
-       const secondResult = cache.generateShards(["tag1", "tag3", "tag4"]);
-       expect(firstResult.get("shard-1")).toEqual(secondResult.get("shard-1"));
+       const firstResult = cache.groupTagsByDO({ tags: ["tag1"] });
+       const secondResult = cache.groupTagsByDO({ tags: ["tag1", "tag3", "tag4"] });
+       expect(firstResult[0]).toEqual(secondResult[0]);
+     });
+
+     it("should split hard and soft tags", () => {
+       const cache = doShardedTagCache();
+       const expectedResult = [
+         { doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
+         { doId: expect.objectContaining({ shardId: "tag-hard;shard-1", replicaId: 1 }), tags: ["tag1"] },
+       ];
+       const result = cache.groupTagsByDO({ tags: ["tag1", "_N_T_/tag1"] });
+       expect(result).toEqual(expectedResult);
+       expect(result[1]?.doId.key).toBe("tag-hard;shard-1;replica-1");
+       expect(result[0]?.doId.key).toBe("tag-soft;shard-3;replica-1");
+     });
+
+     describe("with shard replication", () => {
+       it("should generate all doIds if generateAllReplicas is true", () => {
+         const cache = doShardedTagCache({ baseShardSize: 4, enableShardReplication: true });
+         const expectedResult = [
+           { doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
+           { doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
+           { doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
+           { doId: expect.objectContaining({ shardId: "tag-soft;shard-3" }), tags: ["_N_T_/tag1"] },
+           { doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1"] },
+           { doId: expect.objectContaining({ shardId: "tag-hard;shard-1" }), tags: ["tag1"] },
+         ];
+         const result = cache.groupTagsByDO({ tags: ["tag1", "_N_T_/tag1"], generateAllReplicas: true });
+         console.log(result);
+         expect(result).toEqual(expectedResult);
+       });
+
+       it("should generate only one doId per tag type if generateAllReplicas is false", () => {
+         const cache = doShardedTagCache({ baseShardSize: 4, enableShardReplication: true });
+         const shardedTagCollection = cache.groupTagsByDO({
+           tags: ["tag1", "_N_T_/tag1"],
+           generateAllReplicas: false,
+         });
+         expect(shardedTagCollection.length).toBe(2);
+         const firstDOId = shardedTagCollection[0]?.doId;
+         const secondDOId = shardedTagCollection[1]?.doId;
+
+         expect(firstDOId?.shardId).toBe("tag-soft;shard-3");
+         expect(secondDOId?.shardId).toBe("tag-hard;shard-1");
+
+         // We still need to check if the last part is between the correct boundaries
+         expect(firstDOId?.replicaId).toBeGreaterThanOrEqual(1);
+         expect(firstDOId?.replicaId).toBeLessThanOrEqual(DEFAULT_SOFT_REPLICAS);
+
+         expect(secondDOId?.replicaId).toBeGreaterThanOrEqual(1);
+         expect(secondDOId?.replicaId).toBeLessThanOrEqual(DEFAULT_HARD_REPLICAS);
+       });
    });
  });

@@ -115,7 +182,7 @@ describe("DOShardedTagCache", () => {
      expect(cache.putToRegionalCache).toHaveBeenCalled();
    });

-     it("should call all the shards ", async () => {
+     it("should call all the durable object instances", async () => {
      const cache = doShardedTagCache();
      cache.getFromRegionalCache = vi.fn();
      const result = await cache.hasBeenRevalidated(["tag1", "tag2"], 123456);
@@ -130,6 +197,11 @@ describe("DOShardedTagCache", () => {
      globalThis.openNextConfig = {
        dangerous: { disableTagCache: false },
      };
+       vi.useFakeTimers();
+       vi.setSystemTime(1000);
+     });
+     afterEach(() => {
+       vi.useRealTimers();
    });
    it("should return early if the cache is disabled", async () => {
      globalThis.openNextConfig = {
@@ -146,24 +218,37 @@ describe("DOShardedTagCache", () => {
      await cache.writeTags(["tag1"]);
      expect(idFromNameMock).toHaveBeenCalled();
      expect(writeTagsMock).toHaveBeenCalled();
-       expect(writeTagsMock).toHaveBeenCalledWith(["tag1"]);
+       expect(writeTagsMock).toHaveBeenCalledWith(["tag1"], 1000);
    });

    it("should write the tags to the cache for multiple shards", async () => {
      const cache = doShardedTagCache();
      await cache.writeTags(["tag1", "tag2"]);
      expect(idFromNameMock).toHaveBeenCalledTimes(2);
      expect(writeTagsMock).toHaveBeenCalledTimes(2);
-       expect(writeTagsMock).toHaveBeenCalledWith(["tag1"]);
-       expect(writeTagsMock).toHaveBeenCalledWith(["tag2"]);
+       expect(writeTagsMock).toHaveBeenCalledWith(["tag1"], 1000);
+       expect(writeTagsMock).toHaveBeenCalledWith(["tag2"], 1000);
+     });
+
+     it('should write to all the replicated shards if "generateAllReplicas" is true', async () => {
+       const cache = doShardedTagCache({ baseShardSize: 4, enableShardReplication: true });
+       await cache.writeTags(["tag1", "_N_T_/tag1"]);
+       expect(idFromNameMock).toHaveBeenCalledTimes(6);
+       expect(writeTagsMock).toHaveBeenCalledTimes(6);
+       expect(writeTagsMock).toHaveBeenCalledWith(["tag1"], 1000);
+       expect(writeTagsMock).toHaveBeenCalledWith(["_N_T_/tag1"], 1000);
    });

    it("should call deleteRegionalCache", async () => {
      const cache = doShardedTagCache();
      cache.deleteRegionalCache = vi.fn();
      await cache.writeTags(["tag1"]);
      expect(cache.deleteRegionalCache).toHaveBeenCalled();
-       expect(cache.deleteRegionalCache).toHaveBeenCalledWith("shard-1", ["tag1"]);
+       expect(cache.deleteRegionalCache).toHaveBeenCalledWith(
+         expect.objectContaining({ key: "tag-hard;shard-1;replica-1" }),
+         ["tag1"]
+       );
+       // expect(cache.deleteRegionalCache).toHaveBeenCalledWith("tag-hard;shard-1;replica-1", ["tag1"]);
    });
  });

@@ -178,7 +263,7 @@ describe("DOShardedTagCache", () => {
      globalThis.caches = {
        open: vi.fn().mockResolvedValue("cache"),
      };
-       const cache = doShardedTagCache({ numberOfShards: 4, regionalCache: true });
+       const cache = doShardedTagCache({ baseShardSize: 4, regionalCache: true });
      expect(cache.localCache).toBeUndefined();
      expect(await cache.getCacheInstance()).toBe("cache");
      expect(cache.localCache).toBe("cache");
@@ -190,7 +275,12 @@ describe("DOShardedTagCache", () => {
  describe("getFromRegionalCache", () => {
    it("should return undefined if regional cache is disabled", async () => {
      const cache = doShardedTagCache();
-       expect(await cache.getFromRegionalCache("shard-1", ["tag1"])).toBeUndefined();
+       const doId = new TagCacheDOId({
+         baseShardId: "shard-1",
+         numberOfReplicas: 1,
+         shardType: "hard",
+       });
+       expect(await cache.getFromRegionalCache(doId, ["tag1"])).toBeUndefined();
    });

    it("should call .match on the cache", async () => {
@@ -200,10 +290,82 @@ describe("DOShardedTagCache", () => {
          match: vi.fn().mockResolvedValue("response"),
        }),
      };
-       const cache = doShardedTagCache({ numberOfShards: 4, regionalCache: true });
-       expect(await cache.getFromRegionalCache("shard-1", ["tag1"])).toBe("response");
+       const cache = doShardedTagCache({ baseShardSize: 4, regionalCache: true });
+       const doId = new TagCacheDOId({
+         baseShardId: "shard-1",
+         numberOfReplicas: 1,
+         shardType: "hard",
+       });
+       expect(await cache.getFromRegionalCache(doId, ["tag1"])).toBe("response");
      // @ts-expect-error - Defined on cloudflare context
      globalThis.caches = undefined;
    });
  });
+
+   describe("getCacheKey", () => {
+     it("should return the cache key without the random part", async () => {
+       const cache = doShardedTagCache();
+       const doId1 = new TagCacheDOId({ baseShardId: "shard-0", numberOfReplicas: 1, shardType: "hard" });
+       const reqKey = await cache.getCacheKey(doId1, ["_N_T_/tag1"]);
+       expect(reqKey.url).toBe("http://local.cache/shard/tag-hard;shard-0?tags=_N_T_%2Ftag1");
+
+       const doId2 = new TagCacheDOId({
+         baseShardId: "shard-1",
+         numberOfReplicas: 1,
+         shardType: "hard",
+       });
+       const reqKey2 = await cache.getCacheKey(doId2, ["tag1"]);
+       expect(reqKey2.url).toBe("http://local.cache/shard/tag-hard;shard-1?tags=tag1");
+     });
+   });
+
+   describe("performWriteTagsWithRetry", () => {
+     it("should retry if it fails", async () => {
+       vi.useFakeTimers();
+       vi.setSystemTime(1000);
+       const cache = doShardedTagCache();
+       writeTagsMock.mockImplementationOnce(() => {
+         throw new Error("error");
+       });
+       const spiedFn = vi.spyOn(cache, "performWriteTagsWithRetry");
+       const doId = new TagCacheDOId({
+         baseShardId: "shard-1",
+         numberOfReplicas: 1,
+         shardType: "hard",
+       });
+       await cache.performWriteTagsWithRetry(doId, ["tag1"], Date.now());
+       expect(writeTagsMock).toHaveBeenCalledTimes(2);
+       expect(spiedFn).toHaveBeenCalledTimes(2);
+       expect(spiedFn).toHaveBeenCalledWith(doId, ["tag1"], 1000, 1);
+       expect(sendDLQMock).not.toHaveBeenCalled();
+
+       vi.useRealTimers();
+     });
+
+     it("should stop retrying after 3 times", async () => {
+       vi.useFakeTimers();
+       vi.setSystemTime(1000);
+       const cache = doShardedTagCache();
+       writeTagsMock.mockImplementationOnce(() => {
+         throw new Error("error");
+       });
+       const spiedFn = vi.spyOn(cache, "performWriteTagsWithRetry");
+       await cache.performWriteTagsWithRetry(
+         new TagCacheDOId({ baseShardId: "shard-1", numberOfReplicas: 1, shardType: "hard" }),
+         ["tag1"],
+         Date.now(),
+         3
+       );
+       expect(writeTagsMock).toHaveBeenCalledTimes(1);
+       expect(spiedFn).toHaveBeenCalledTimes(1);
+
+       expect(sendDLQMock).toHaveBeenCalledWith({
+         failingShardId: "tag-hard;shard-1;replica-1",
+         failingTags: ["tag1"],
+         lastModified: 1000,
+       });
+
+       vi.useRealTimers();
+     });
+   });
});