@@ -397,4 +397,40 @@ describe('checksum cache', function () {
       [{ bucket: 'test', end: '123' }]
     ]);
   });
+
+  it('should handle concurrent requests greater than cache size', async function () {
+    // This will not be cached efficiently, but we test that we don't get errors at least.
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = new ChecksumCache({
+      fetchChecksums: async (batch) => {
+        lookups.push(batch);
+        return fetchTestChecksums(batch);
+      },
+      maxSize: 2
+    });
+
+    const p3 = cache.getChecksums('123', ['test3']);
+    const p4 = cache.getChecksums('123', ['test4']);
+    const p1 = cache.getChecksums('123', ['test']);
+    const p2 = cache.getChecksums('123', ['test2']);
+
+    expect(await p1).toEqual([TEST_123]);
+    expect(await p2).toEqual([TEST2_123]);
+    expect(await p3).toEqual([TEST3_123]);
+    expect(await p4).toEqual([
+      {
+        bucket: 'test4',
+        checksum: 1004797863,
+        count: 123
+      }
+    ]);
+
+    // The lookup should be deduplicated, even though it's in progress
+    expect(lookups).toEqual([
+      [{ bucket: 'test3', end: '123' }],
+      [{ bucket: 'test4', end: '123' }],
+      [{ bucket: 'test', end: '123' }],
+      [{ bucket: 'test2', end: '123' }]
+    ]);
+  });
 });
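
For context when reading the diff: `FetchPartialBucketChecksum`, `fetchTestChecksums`, `ChecksumCache`, and the `TEST*_123` fixtures are defined elsewhere in the test suite and service code, so their exact shapes are not shown here. The following is a minimal sketch, assuming those shapes, of the behaviour the test exercises: concurrent lookups for the same bucket/end share one in-flight fetch, while settled results are capped at `maxSize` entries. It is illustrative only, not the project's `ChecksumCache` implementation.

```ts
interface FetchPartialBucketChecksum {
  bucket: string;
  end: string;
}

interface BucketChecksum {
  bucket: string;
  checksum: number;
  count: number;
}

type FetchChecksums = (batch: FetchPartialBucketChecksum[]) => Promise<BucketChecksum[]>;

// Illustrative sketch only: dedupe concurrent lookups per (bucket, end) and
// keep at most `maxSize` settled results. Not the project's ChecksumCache.
class TinyChecksumCache {
  private inflight = new Map<string, Promise<BucketChecksum>>();
  private settled = new Map<string, BucketChecksum>(); // insertion order approximates LRU

  constructor(private options: { fetchChecksums: FetchChecksums; maxSize: number }) {}

  async getChecksums(end: string, buckets: string[]): Promise<BucketChecksum[]> {
    return Promise.all(buckets.map((bucket) => this.getOne(bucket, end)));
  }

  private getOne(bucket: string, end: string): Promise<BucketChecksum> {
    const key = `${bucket}/${end}`;
    const cached = this.settled.get(key);
    if (cached != null) {
      return Promise.resolve(cached);
    }
    // Concurrent callers for the same key share the same in-flight fetch,
    // which is the deduplication the test's final assertion checks for.
    let pending = this.inflight.get(key);
    if (pending == null) {
      pending = this.options
        .fetchChecksums([{ bucket, end }])
        .then(([result]) => {
          this.settled.set(key, result);
          // Evict the oldest settled entry once the cap is exceeded.
          if (this.settled.size > this.options.maxSize) {
            const oldest = this.settled.keys().next().value;
            if (oldest != null) {
              this.settled.delete(oldest);
            }
          }
          return result;
        })
        .finally(() => {
          this.inflight.delete(key);
        });
      this.inflight.set(key, pending);
    }
    return pending;
  }
}
```

With a cap of 2 and four distinct buckets requested concurrently, a cache of this shape still issues one fetch per bucket in call order, which is what the final `expect(lookups)` assertion in the test pins down.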