我认为 Transform 流适合于此，但我会将解压(inflate)作为管道中的一个单独步骤。
这是一个快速且基本未经测试的例子：
var zlib = require('zlib'); var stream = require('stream'); var transformer = new stream.Transform(); // Properties used to keep internal state of transformer. transformer._buffers = []; transformer._inputSize = 0; transformer._targetSize = 1024 * 38; // Dump one 'output packet' transformer._dump = function(done) { // concatenate buffers and convert to binary string var buffer = Buffer.concat(this._buffers).toString('binary'); // Take first 1024 packets. var packetBuffer = buffer.substring(0, this._targetSize); // Keep the rest and reset counter. this._buffers = [ new Buffer(buffer.substring(this._targetSize)) ]; this._inputSize = this._buffers[0].length; // output header this.push('HELLO WORLD'); // output compressed packet buffer zlib.deflate(packetBuffer, function(err, compressed) { // TODO: handle `err` this.push(compressed); if (done) { done(); } }.bind(this)); }; // Main transformer logic: buffer chunks and dump them once the // target size has been met. transformer._transform = function(chunk, encoding, done) { this._buffers.push(chunk); this._inputSize += chunk.length; if (this._inputSize >= this._targetSize) { this._dump(done); } else { done(); } }; // Flush any remaining buffers. transformer._flush = function() { this._dump(); }; // Example: var fs = require('fs'); fs.createReadStream('depth_1000000') .pipe(zlib.createInflate()) .pipe(transformer) .pipe(fs.createWriteStream('depth_1000000.out'));
小智.. 5
如果要写入的流(在本例中为文件输出流)缓冲了太多数据，push 会返回 false。由于您正在写入磁盘，这是合理的：您处理数据的速度比写出来的速度快。
当输出流(out)的缓冲区已满时，您的转换流将无法继续 push，并开始自己缓冲数据；如果该缓冲区也被填满，那么输入流(inp)的缓冲区将开始填充。这正是事情应有的运作方式：管道中的流只会以链中最慢一环的速度处理数据(一旦缓冲区已满)。
我认为 Transform 流适合于此，但我会将解压(inflate)作为管道中的一个单独步骤。
这是一个快速且基本未经测试的例子：
var zlib = require('zlib');
var stream = require('stream');

var transformer = new stream.Transform();

// Internal state: accumulated input chunks, their total byte count,
// and the fixed packet size we emit once enough data has arrived.
transformer._buffers = [];
transformer._inputSize = 0;
transformer._targetSize = 1024 * 38;

// Emit one "output packet": a header followed by a deflated body of
// `_targetSize` bytes (or whatever remains, when flushing).
transformer._dump = function(done) {
  // Work on raw bytes. The original converted to a 'binary' string and
  // passed that string to zlib.deflate, which decodes strings as UTF-8
  // and corrupts any byte > 0x7F; slicing Buffers avoids that entirely.
  var buffer = Buffer.concat(this._buffers, this._inputSize);

  // First `_targetSize` bytes become the packet body.
  var packetBuffer = buffer.slice(0, this._targetSize);

  // Keep the remainder and reset the counter.
  var rest = buffer.slice(this._targetSize);
  this._buffers = [rest];
  this._inputSize = rest.length;

  // Packet header.
  this.push('HELLO WORLD');

  // Compressed packet body. Errors are forwarded instead of dropped.
  zlib.deflate(packetBuffer, function(err, compressed) {
    if (err) {
      if (done) { done(err); } else { this.emit('error', err); }
      return;
    }
    this.push(compressed);
    if (done) { done(); }
  }.bind(this));
};

// Main transformer logic: buffer chunks and dump them once the
// target size has been met.
transformer._transform = function(chunk, encoding, done) {
  this._buffers.push(chunk);
  this._inputSize += chunk.length;
  if (this._inputSize >= this._targetSize) {
    this._dump(done);
  } else {
    done();
  }
};

// Flush any remaining buffered data. The callback MUST be forwarded —
// the original swallowed it, so the stream never signaled completion
// and the pipeline could hang before the last packet was written.
transformer._flush = function(done) {
  this._dump(done);
};

// Example:
var fs = require('fs');
fs.createReadStream('depth_1000000')
  .pipe(zlib.createInflate())
  .pipe(transformer)
  .pipe(fs.createWriteStream('depth_1000000.out'));
如果要写入的流(在本例中为文件输出流)缓冲了太多数据，push 会返回 false。由于您正在写入磁盘，这是合理的：您处理数据的速度比写出来的速度快。
当输出流(out)的缓冲区已满时，您的转换流将无法继续 push，并开始自己缓冲数据；如果该缓冲区也被填满，那么输入流(inp)的缓冲区将开始填充。这正是事情应有的运作方式：管道中的流只会以链中最慢一环的速度处理数据(一旦缓冲区已满)。