"Fossies" - the Fresh Open Source Software Archive

Member "Atom/resources/app/apm/node_modules/concat-stream/node_modules/readable-stream/lib/_stream_transform.js" (11 Apr 2017, 6354 Bytes) of package /windows/misc/atom-windows.zip:


As a special service, "Fossies" has tried to format the requested source page into HTML format using (guessed) JavaScript source code syntax highlighting (style: standard) with prefixed line numbers and a code folding option. Alternatively, you can view or download the uninterpreted source code file here.

    1 // a transform stream is a readable/writable stream where you do
    2 // something with the data.  Sometimes it's called a "filter",
    3 // but that's not a great name for it, since that implies a thing where
    4 // some bits pass through, and others are simply ignored.  (That would
    5 // be a valid example of a transform, of course.)
    6 //
    7 // While the output is causally related to the input, it's not a
    8 // necessarily symmetric or synchronous transformation.  For example,
    9 // a zlib stream might take multiple plain-text writes(), and then
   10 // emit a single compressed chunk some time in the future.
   11 //
   12 // Here's how this works:
   13 //
   14 // The Transform stream has all the aspects of the readable and writable
   15 // stream classes.  When you write(chunk), that calls _write(chunk,cb)
   16 // internally, and returns false if there's a lot of pending writes
   17 // buffered up.  When you call read(), that calls _read(n) until
   18 // there's enough pending readable data buffered up.
   19 //
   20 // In a transform stream, the written data is placed in a buffer.  When
   21 // _read(n) is called, it transforms the queued up data, calling the
   22 // buffered _write cb's as it consumes chunks.  If consuming a single
   23 // written chunk would result in multiple output chunks, then the first
   24 // outputted bit calls the readcb, and subsequent chunks just go into
   25 // the read buffer, and will cause it to emit 'readable' if necessary.
   26 //
   27 // This way, back-pressure is actually determined by the reading side,
   28 // since _read has to be called to start processing a new chunk.  However,
   29 // a pathological inflate type of transform can cause excessive buffering
   30 // here.  For example, imagine a stream where every byte of input is
   31 // interpreted as an integer from 0-255, and then results in that many
   32 // bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
   33 // 1kb of data being output.  In this case, you could write a very small
   34 // amount of input, and end up with a very large amount of output.  In
   35 // such a pathological inflating mechanism, there'd be no way to tell
   36 // the system to stop doing the transform.  A single 4MB write could
   37 // cause the system to run out of memory.
   38 //
   39 // However, even in such a pathological case, only a single written chunk
   40 // would be consumed, and then the rest would wait (un-transformed) until
   41 // the results of the previous transformed chunk were consumed.
   42 
   43 'use strict';
   44 
   45 module.exports = Transform;
   46 
   47 var Duplex = require('./_stream_duplex');
   48 
   49 /*<replacement>*/
   50 var util = require('core-util-is');
   51 util.inherits = require('inherits');
   52 /*</replacement>*/
   53 
   54 util.inherits(Transform, Duplex);
   55 
   56 function TransformState(stream) {
   57   this.afterTransform = function (er, data) {
   58     return afterTransform(stream, er, data);
   59   };
   60 
   61   this.needTransform = false;
   62   this.transforming = false;
   63   this.writecb = null;
   64   this.writechunk = null;
   65   this.writeencoding = null;
   66 }
   67 
   68 function afterTransform(stream, er, data) {
   69   var ts = stream._transformState;
   70   ts.transforming = false;
   71 
   72   var cb = ts.writecb;
   73 
   74   if (!cb) return stream.emit('error', new Error('no writecb in Transform class'));
   75 
   76   ts.writechunk = null;
   77   ts.writecb = null;
   78 
   79   if (data !== null && data !== undefined) stream.push(data);
   80 
   81   cb(er);
   82 
   83   var rs = stream._readableState;
   84   rs.reading = false;
   85   if (rs.needReadable || rs.length < rs.highWaterMark) {
   86     stream._read(rs.highWaterMark);
   87   }
   88 }
   89 
   90 function Transform(options) {
   91   if (!(this instanceof Transform)) return new Transform(options);
   92 
   93   Duplex.call(this, options);
   94 
   95   this._transformState = new TransformState(this);
   96 
   97   // when the writable side finishes, then flush out anything remaining.
   98   var stream = this;
   99 
  100   // start out asking for a readable event once data is transformed.
  101   this._readableState.needReadable = true;
  102 
  103   // we have implemented the _read method, and done the other things
  104   // that Readable wants before the first _read call, so unset the
  105   // sync guard flag.
  106   this._readableState.sync = false;
  107 
  108   if (options) {
  109     if (typeof options.transform === 'function') this._transform = options.transform;
  110 
  111     if (typeof options.flush === 'function') this._flush = options.flush;
  112   }
  113 
  114   this.once('prefinish', function () {
  115     if (typeof this._flush === 'function') this._flush(function (er) {
  116       done(stream, er);
  117     });else done(stream);
  118   });
  119 }
  120 
  121 Transform.prototype.push = function (chunk, encoding) {
  122   this._transformState.needTransform = false;
  123   return Duplex.prototype.push.call(this, chunk, encoding);
  124 };
  125 
  126 // This is the part where you do stuff!
  127 // override this function in implementation classes.
  128 // 'chunk' is an input chunk.
  129 //
  130 // Call `push(newChunk)` to pass along transformed output
  131 // to the readable side.  You may call 'push' zero or more times.
  132 //
  133 // Call `cb(err)` when you are done with this chunk.  If you pass
  134 // an error, then that'll put the hurt on the whole operation.  If you
  135 // never call cb(), then you'll never get another chunk.
  136 Transform.prototype._transform = function (chunk, encoding, cb) {
  137   throw new Error('not implemented');
  138 };
  139 
  140 Transform.prototype._write = function (chunk, encoding, cb) {
  141   var ts = this._transformState;
  142   ts.writecb = cb;
  143   ts.writechunk = chunk;
  144   ts.writeencoding = encoding;
  145   if (!ts.transforming) {
  146     var rs = this._readableState;
  147     if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
  148   }
  149 };
  150 
  151 // Doesn't matter what the args are here.
  152 // _transform does all the work.
  153 // That we got here means that the readable side wants more data.
  154 Transform.prototype._read = function (n) {
  155   var ts = this._transformState;
  156 
  157   if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
  158     ts.transforming = true;
  159     this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  160   } else {
  161     // mark that we need a transform, so that any data that comes in
  162     // will get processed, now that we've asked for it.
  163     ts.needTransform = true;
  164   }
  165 };
  166 
  167 function done(stream, er) {
  168   if (er) return stream.emit('error', er);
  169 
  170   // if there's nothing in the write buffer, then that means
  171   // that nothing more will ever be provided
  172   var ws = stream._writableState;
  173   var ts = stream._transformState;
  174 
  175   if (ws.length) throw new Error('calling transform done when ws.length != 0');
  176 
  177   if (ts.transforming) throw new Error('calling transform done when still transforming');
  178 
  179   return stream.push(null);
  180 }