'use strict';
require('../common');
var Readable = require('stream').Readable;
var assert = require('assert');
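
// Push chunks two at a time from _read() and check that they end up in the
// readable's internal buffer in the order they were pushed.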
var s = new Readable({
  highWaterMark: 20,
  encoding: 'ascii'
});
var list = ['1', '2', '3', '4', '5', '6'];
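
// Each call to _read() shifts two chunks off the list and pushes them
// synchronously; once the list is empty, push(null) signals end-of-stream.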
s._read = function(n) {
  var one = list.shift();
  if (!one) {
    s.push(null);
  } else {
    var two = list.shift();
    s.push(one);
    s.push(two);
  }
};
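
// read(0) consumes no data, but it triggers the first _read() call and starts
// filling the internal buffer.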
s.read(0);
// ACTUALLY [1, 3, 5, 6, 4, 2]
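
// Nothing consumes the stream, so at exit the internal buffer should still
// hold all six chunks in push order.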
process.on('exit', function() {
  assert.deepStrictEqual(s._readableState.buffer,
                         ['1', '2', '3', '4', '5', '6']);
  console.log('ok');
});