'use strict';

+ const EventEmitter = require('events').EventEmitter;
const fs = require('fs');
const Stream = require('stream').Stream;
- const utils = require('haraka-utils');

- const ChunkEmitter = require('haraka-chunk-emitter');
-
- const STATE_HEADERS = 1;
- const STATE_BODY = 2;
+ const STATE = {
+     HEADERS: 1,
+     BODY: 2,
+ };

class MessageStream extends Stream {
-     constructor (cfg, id, headers) {
+     constructor (cfg = {}, id, headers) {
        super();
        if (!id) throw new Error('id required');
        this.uuid = id;
        this.write_ce = null;
        this.read_ce = null;
        this.bytes_read = 0;
-         this.state = STATE_HEADERS;
+         this.state = STATE.HEADERS;
        this.idx = {};
        this.end_called = false;
        this.end_callback = null;
@@ -67,16 +67,16 @@ class MessageStream extends Stream {
        this.bytes_read += line.length;

        // Build up an index of 'interesting' data on the fly
-         if (this.state === STATE_HEADERS) {
+         if (this.state === STATE.HEADERS) {
            // Look for end of headers line
            if (line.length === 2 && line[0] === 0x0d && line[1] === 0x0a) {
                this.idx.headers = { start: 0, end: this.bytes_read - line.length };
-                 this.state = STATE_BODY;
+                 this.state = STATE.BODY;
                this.idx.body = { start: this.bytes_read };
            }
        }

-         if (this.state === STATE_BODY) {
+         if (this.state === STATE.BODY) {
            // Look for MIME boundaries
            if (line.length > 4 && line[0] === 0x2d && line[1] == 0x2d) {
                let boundary = line.slice(2).toString().replace(/\s*$/, '');
@@ -197,10 +197,7 @@ class MessageStream extends Stream {
        }
    }

-     /*
-     ** READABLE STREAM
-     */
-
+     // READABLE STREAM
    _read () {
        const self = this;
        if (!this.end_called) {
@@ -231,9 +228,8 @@ class MessageStream extends Stream {
            });
        }
        else {
-             // Read the message body by line
-             // If we have queued entries, then we didn't
-             // create a queue file, so we read from memory.
+             // Read the message body by line. If we have queued entries, then
+             // we didn't create a queue file, so read from memory.
            if (this._queue.length > 0) {
                // TODO: implement start/end offsets
                for (let i = 0; i < this._queue.length; i++) {
@@ -260,7 +256,7 @@ class MessageStream extends Stream {

    process_buf (buf) {
        let offset = 0;
-         while ((offset = utils.indexOfLF(buf)) !== -1) {
+         while ((offset = indexOfLF(buf)) !== -1) {
            let line = buf.slice(0, offset + 1);
            buf = buf.slice(line.length);
            // Don't output headers if they where sent already
@@ -411,6 +407,13 @@ class MessageStream extends Stream {
    }
}

+ function indexOfLF (buf) {
+     for (let i = 0; i < buf.length; i++) {
+         if (buf[i] === 0x0a) return i;
+     }
+     return -1;
+ }
+
module.exports = MessageStream;

class GetDataStream extends Stream {
@@ -439,3 +442,74 @@ class GetDataStream extends Stream {
        // ignore
    }
}
+
+ class ChunkEmitter extends EventEmitter {
+     constructor (buffer_size) {
+         super();
+         this.buffer_size = parseInt(buffer_size) || (64 * 1024);
+         this.buf = null;
+         this.pos = 0;
+         this.bufs = [];
+         this.bufs_size = 0;
+     }
+
+     fill (input) {
+         if (typeof input === 'string') {
+             input = Buffer.from(input);
+         }
+
+         // Optimization: don't allocate a new buffer until the input we've
+         // had so far is bigger than our buffer size.
+         if (!this.buf) {
+             // We haven't allocated a buffer yet
+             this.bufs.push(input);
+             this.bufs_size += input.length;
+             if ((input.length + this.bufs_size) > this.buffer_size) {
+                 this.buf = Buffer.alloc(this.buffer_size);
+                 const in_new = Buffer.concat(this.bufs, this.bufs_size);
+                 input = in_new;
+                 // Reset
+                 this.bufs = [];
+                 this.bufs_size = 0;
+             }
+             else {
+                 return;
+             }
+         }
+
+         while (input.length > 0) {
+             let remaining = this.buffer_size - this.pos;
+             if (remaining === 0) {
+                 this.emit('data', this.buf); //.slice(0));
+                 this.buf = Buffer.alloc(this.buffer_size);
+                 this.pos = 0;
+                 remaining = this.buffer_size;
+             }
+             const to_write = ((remaining > input.length) ? input.length : remaining);
+             input.copy(this.buf, this.pos, 0, to_write);
+             this.pos += to_write;
+             input = input.slice(to_write);
+         }
+     }
+
+     end (cb) {
+         let emitted = false;
+         if (this.bufs_size > 0) {
+             this.emit('data', Buffer.concat(this.bufs, this.bufs_size));
+             emitted = true;
+         }
+         else if (this.pos > 0) {
+             this.emit('data', this.buf.slice(0, this.pos));
+             emitted = true;
+         }
+         // Reset
+         this.buf = null;
+         this.pos = 0;
+         this.bufs = [];
+         this.bufs_size = 0;
+         if (cb && typeof cb === 'function') cb();
+         return emitted;
+     }
+ }
+
+ module.exports.ChunkEmitter = ChunkEmitter
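
For reference, a minimal sketch of how the inlined ChunkEmitter can be driven (my example, not part of this commit): fill() buffers input and emits 'data' events carrying buffer_size-byte chunks once enough has accumulated, and end() flushes whatever remains before invoking the optional callback. The require path and the tiny buffer size below are illustrative assumptions.

// Usage sketch only (not part of the commit); adjust the require path to
// wherever this module actually lives.
const { ChunkEmitter } = require('./messagestream');

// A deliberately tiny buffer (8 bytes) so the example emits more than one
// chunk; the default is 64 KiB.
const ce = new ChunkEmitter(8);

ce.on('data', (chunk) => {
    console.log(`chunk: ${chunk.length} bytes`);
});

ce.fill('Hello, ');    // 7 bytes: held internally, nothing emitted yet
ce.fill('world!\r\n'); // crosses the 8-byte buffer, so one full chunk is emitted
ce.end(() => {         // flushes the 7-byte remainder as a final 'data' event
    console.log('done');
});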