1- import { insertMany } from './ddb' ;
1+ jest . setTimeout ( 25000 ) ;
2+
3+ import { uniq } from 'lodash' ;
4+ import * as ddbHelpers from './ddb' ;
25import { parallelScanAsStream } from './parallel-scan-stream' ;
36
7+ async function delay ( ms : number ) {
8+ return new Promise ( r => {
9+ setTimeout ( r , ms ) ;
10+ } ) ;
11+ }
12+
describe('parallelScanAsStream', () => {
  // Seed data: every item carries isLarge: false so the scans in the first
  // two tests can filter out the large-payload items inserted later by the
  // highWaterMark/backpressure test.
  const files = [
    { id: 'some-file-id-1', isLarge: false },
    { id: 'some-file-id-2', isLarge: false },
    { id: 'some-file-id-3', fileSize: 100, isLarge: false },
    { id: 'some-file-id-4', isLarge: false },
    { id: 'some-file-id-5', isLarge: false },
    { id: 'some-file-id-6', fileSize: 200, isLarge: false },
    { id: 'some-file-id-7', isLarge: false },
    { id: 'some-file-id-8', isLarge: false },
    { id: 'some-file-id-9', fileSize: 300, isLarge: false },
    { id: 'some-file-id-10', isLarge: false },
  ];

  // Populate the 'files' table once before any test runs.
  beforeAll(async () => {
    await ddbHelpers.insertMany({ items: files, tableName: 'files' });
  });
2130
// NOTE(review): diff-rendered fragment — the remainder of this test's body
// (stream consumption and its assertions) is elided by the hunk header that
// follows, so only the scan setup is visible here.
// Intent: single-worker scan of 'files', chunked two items at a time; the
// filter additionally excludes the isLarge items seeded by the backpressure
// test further down.
2231 it ( 'should stream items with chunks of 2 with concurrency 1' , async ( ) => {
2332 const stream = await parallelScanAsStream (
2433 {
2534 TableName : 'files' ,
26- FilterExpression : 'attribute_exists(#id)' ,
35+ FilterExpression : 'attribute_exists(#id) and #isLarge = :false ' ,
2736 ExpressionAttributeNames : {
2837 '#id' : 'id' ,
38+ '#isLarge' : 'isLarge' ,
39+ } ,
40+ ExpressionAttributeValues : {
41+ ':false' : false ,
2942 } ,
3043 } ,
3144 { concurrency : 1 , chunkSize : 2 }
@@ -40,9 +53,13 @@ describe('parallelScanAsStream', () => {
// NOTE(review): diff-rendered fragment — the opening it('...concurrency 5')
// line and part of the stream-consumption code are elided by the surrounding
// hunk headers; only the scan setup and the final assertion are visible.
// Intent: same isLarge-filtered scan as above but with five concurrent
// workers; all 10 seeded non-large items must still come through exactly once.
4053 const stream = await parallelScanAsStream (
4154 {
4255 TableName : 'files' ,
43- FilterExpression : 'attribute_exists(#id)' ,
56+ FilterExpression : 'attribute_exists(#id) and #isLarge = :false ' ,
4457 ExpressionAttributeNames : {
4558 '#id' : 'id' ,
59+ '#isLarge' : 'isLarge' ,
60+ } ,
61+ ExpressionAttributeValues : {
62+ ':false' : false ,
4663 } ,
4764 } ,
4865 { concurrency : 5 , chunkSize : 2 }
@@ -58,4 +75,50 @@ describe('parallelScanAsStream', () => {
5875
5976 expect ( allItems ) . toHaveLength ( 10 ) ;
6077 } ) ;
78+
79+ it ( 'should pause calling dynamodb after highWaterMark reached' , async ( ) => {
80+ const scanSpy = jest . spyOn ( ddbHelpers , 'scan' ) ;
81+
82+ const megaByte = Buffer . alloc ( 1024 * 390 ) ; // Maximum allowed item size in ddb is 400KB
83+ const megaByteString = megaByte . toString ( ) ;
84+
85+ await ddbHelpers . insertMany ( {
86+ items : [
87+ { id : 'some-big-file-id-1' , isLarge : true , payload : megaByteString } ,
88+ { id : 'some-big-file-id-2' , isLarge : true , payload : megaByteString } ,
89+ { id : 'some-big-file-id-3' , isLarge : true , payload : megaByteString } ,
90+ { id : 'some-big-file-id-4' , isLarge : true , payload : megaByteString } ,
91+ { id : 'some-big-file-id-5' , isLarge : true , payload : megaByteString } ,
92+ ] ,
93+ tableName : 'files' ,
94+ } ) ;
95+
96+ const stream = await parallelScanAsStream (
97+ {
98+ TableName : 'files' ,
99+ FilterExpression : 'attribute_exists(#id) and #isLarge = :true' ,
100+ ExpressionAttributeNames : {
101+ '#id' : 'id' ,
102+ '#isLarge' : 'isLarge' ,
103+ } ,
104+ ExpressionAttributeValues : {
105+ ':true' : true ,
106+ } ,
107+ } ,
108+ { concurrency : 1 , chunkSize : 1 , highWaterMark : 1 }
109+ ) ;
110+
111+ const scanCallsByIteration = [ ] ;
112+ for await ( const _ of stream ) {
113+ expect ( _ ) . not . toBeUndefined ( ) ;
114+
115+ await delay ( 1000 ) ;
116+
117+ scanCallsByIteration . push ( scanSpy . mock . calls . length ) ;
118+ }
119+
120+ const scanCallsByIterationUniq = uniq ( scanCallsByIteration ) ;
121+
122+ expect ( scanCallsByIterationUniq ) . toEqual ( [ 1 , 2 ] ) ;
123+ } ) ;
61124} ) ;
0 commit comments