11'use strict' ;
22
3+ const EventEmitter = require ( 'events' ) . EventEmitter ;
34const fs = require ( 'fs' ) ;
45const Stream = require ( 'stream' ) . Stream ;
5- const utils = require ( 'haraka-utils' ) ;
66
7- const ChunkEmitter = require ( 'haraka-chunk-emitter' ) ;
8-
9- const STATE_HEADERS = 1 ;
10- const STATE_BODY = 2 ;
7+ const STATE = {
8+ HEADERS : 1 ,
9+ BODY : 2 ,
10+ } ;
1111
1212class MessageStream extends Stream {
13- constructor ( cfg , id , headers ) {
13+ constructor ( cfg = { } , id , headers ) {
1414 super ( ) ;
1515 if ( ! id ) throw new Error ( 'id required' ) ;
1616 this . uuid = id ;
1717 this . write_ce = null ;
1818 this . read_ce = null ;
1919 this . bytes_read = 0 ;
20- this . state = STATE_HEADERS ;
20+ this . state = STATE . HEADERS ;
2121 this . idx = { } ;
2222 this . end_called = false ;
2323 this . end_callback = null ;
@@ -67,16 +67,16 @@ class MessageStream extends Stream {
6767 this . bytes_read += line . length ;
6868
6969 // Build up an index of 'interesting' data on the fly
70- if ( this . state === STATE_HEADERS ) {
70+ if ( this . state === STATE . HEADERS ) {
7171 // Look for end of headers line
7272 if ( line . length === 2 && line [ 0 ] === 0x0d && line [ 1 ] === 0x0a ) {
7373 this . idx . headers = { start : 0 , end : this . bytes_read - line . length } ;
74- this . state = STATE_BODY ;
74+ this . state = STATE . BODY ;
7575 this . idx . body = { start : this . bytes_read } ;
7676 }
7777 }
7878
79- if ( this . state === STATE_BODY ) {
79+ if ( this . state === STATE . BODY ) {
8080 // Look for MIME boundaries
8181 if ( line . length > 4 && line [ 0 ] === 0x2d && line [ 1 ] == 0x2d ) {
8282 let boundary = line . slice ( 2 ) . toString ( ) . replace ( / \s * $ / , '' ) ;
@@ -197,10 +197,7 @@ class MessageStream extends Stream {
197197 }
198198 }
199199
200- /*
201- ** READABLE STREAM
202- */
203-
200+ // READABLE STREAM
204201 _read ( ) {
205202 const self = this ;
206203 if ( ! this . end_called ) {
@@ -231,9 +228,8 @@ class MessageStream extends Stream {
231228 } ) ;
232229 }
233230 else {
234- // Read the message body by line
235- // If we have queued entries, then we didn't
236- // create a queue file, so we read from memory.
231+ // Read the message body by line. If we have queued entries, then
232+ // we didn't create a queue file, so read from memory.
237233 if ( this . _queue . length > 0 ) {
238234 // TODO: implement start/end offsets
239235 for ( let i = 0 ; i < this . _queue . length ; i ++ ) {
@@ -260,7 +256,7 @@ class MessageStream extends Stream {
260256
261257 process_buf ( buf ) {
262258 let offset = 0 ;
263- while ( ( offset = utils . indexOfLF ( buf ) ) !== - 1 ) {
259+ while ( ( offset = indexOfLF ( buf ) ) !== - 1 ) {
264260 let line = buf . slice ( 0 , offset + 1 ) ;
265261 buf = buf . slice ( line . length ) ;
266262 // Don't output headers if they where sent already
@@ -411,6 +407,13 @@ class MessageStream extends Stream {
411407 }
412408}
413409
/**
 * Return the index of the first LF (0x0a) byte in buf, or -1 if none.
 * Replaces a manual per-byte JS loop with the native Buffer#indexOf
 * scan, which does the same search in optimized native code.
 * @param {Buffer} buf - buffer to scan
 * @returns {number} zero-based index of the first LF, or -1
 */
function indexOfLF (buf) {
    return buf.indexOf(0x0a);
}
416+
414417module . exports = MessageStream ;
415418
416419class GetDataStream extends Stream {
@@ -439,3 +442,74 @@ class GetDataStream extends Stream {
439442 // ignore
440443 }
441444}
445+
class ChunkEmitter extends EventEmitter {
    /**
     * Accumulates incoming string/Buffer data and re-emits it as
     * fixed-size 'data' events of `buffer_size` bytes (the final
     * chunk, flushed by end(), may be shorter).
     * @param {number|string} [buffer_size] - target chunk size in bytes;
     *     defaults to 64 KiB when missing or unparseable.
     */
    constructor (buffer_size) {
        super();
        this.buffer_size = parseInt(buffer_size, 10) || (64 * 1024);
        this.buf = null;       // active fixed-size output buffer
        this.pos = 0;          // write offset within this.buf
        this.bufs = [];        // small inputs queued before this.buf exists
        this.bufs_size = 0;    // total bytes queued in this.bufs
    }

    /**
     * Append data; emits a 'data' event each time a full chunk is ready.
     * @param {string|Buffer} input - data to append
     */
    fill (input) {
        if (typeof input === 'string') {
            input = Buffer.from(input);
        }

        // Optimization: don't allocate an output buffer until the input
        // we've accumulated so far exceeds our target chunk size.
        if (!this.buf) {
            this.bufs.push(input);
            this.bufs_size += input.length;
            // BUG FIX: the old guard was (input.length + this.bufs_size) >
            // this.buffer_size, but bufs_size already includes input.length
            // (added two lines up), so input was counted twice and the
            // buffer was allocated at roughly half the intended threshold.
            if (this.bufs_size > this.buffer_size) {
                this.buf = Buffer.alloc(this.buffer_size);
                input = Buffer.concat(this.bufs, this.bufs_size);
                // Reset the queue; everything queued is now in `input`.
                this.bufs = [];
                this.bufs_size = 0;
            }
            else {
                return;
            }
        }

        // Copy input into the output buffer, emitting each time it fills.
        while (input.length > 0) {
            let remaining = this.buffer_size - this.pos;
            if (remaining === 0) {
                this.emit('data', this.buf);
                this.buf = Buffer.alloc(this.buffer_size);
                this.pos = 0;
                remaining = this.buffer_size;
            }
            const to_write = ((remaining > input.length) ? input.length : remaining);
            input.copy(this.buf, this.pos, 0, to_write);
            this.pos += to_write;
            input = input.slice(to_write);
        }
    }

    /**
     * Flush any buffered bytes as a final (possibly short) 'data' event
     * and reset all internal state.
     * @param {Function} [cb] - invoked synchronously after the flush.
     * @returns {boolean} true if a 'data' event was emitted.
     */
    end (cb) {
        let emitted = false;
        if (this.bufs_size > 0) {
            this.emit('data', Buffer.concat(this.bufs, this.bufs_size));
            emitted = true;
        }
        else if (this.pos > 0) {
            this.emit('data', this.buf.slice(0, this.pos));
            emitted = true;
        }
        // Reset
        this.buf = null;
        this.pos = 0;
        this.bufs = [];
        this.bufs_size = 0;
        if (typeof cb === 'function') cb();
        return emitted;
    }
}
514+
// Expose ChunkEmitter alongside the default MessageStream export.
module.exports.ChunkEmitter = ChunkEmitter;