var JsonParse = require('../jsonparse');
var test = require('tape');

// Regression test: feed the parser one ~200MB JSON string token in 1KB
// chunks. The parser must reassemble it without running out of memory,
// and the resulting token value must be exactly as long as the payload.
test('can handle large tokens without running out of memory', function (t) {
  var parser = new JsonParse();
  var chunkSize = 1024;        // bytes written per parser.write() call
  var chunks = 1024 * 200;     // 1KB * 204800 chunks = 200MB total
  // Buffer.from exists on modern Node; fall back to the deprecated
  // Buffer constructor on very old runtimes.
  var quote = Buffer.from ? Buffer.from('"') : new Buffer('"');
  t.plan(1);

  parser.onToken = function (type, value) {
    t.equal(value.length, chunkSize * chunks, 'token should be size of input json');
    t.end();
  };

  // Opening quote starts the string token.
  parser.write(quote);
  for (var i = 0; i < chunks; ++i) {
    var buf = Buffer.alloc ? Buffer.alloc(chunkSize) : new Buffer(chunkSize);
    buf.fill('a');
    parser.write(buf);
  }
  // Closing quote completes the token and fires onToken.
  parser.write(quote);
});