Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix CPU spin in the JavaScript implementation side #567

Closed
wants to merge 7 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion azure-pipelines/node.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,4 +30,4 @@ steps:
displayName: 🧪 yarn tslint
inputs:
projectDirectory: src/nerdbank-streams
arguments: tslint --project .
arguments: lint
6 changes: 4 additions & 2 deletions src/nerdbank-streams/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@
"scripts": {
"build": "tsc -p gulpfile.tsconfig.json && gulp",
"watch": "node ./node_modules/typescript/bin/tsc -p tsconfig.json -w",
"test": "jasmine"
"test": "jasmine",
"lint": "tslint --project ."
},
"devDependencies": {
"@types/jasmine": "^4.0.3",
Expand All @@ -52,6 +53,7 @@
"await-semaphore": "^0.1.3",
"cancellationtoken": "^2.0.1",
"caught": "^0.1.3",
"msgpack-lite": "^0.1.26"
"msgpack-lite": "^0.1.26",
"plexer": "^2.0.0"
}
}
17 changes: 2 additions & 15 deletions src/nerdbank-streams/src/FullDuplexStream.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Duplex, PassThrough } from "stream";
import duplexer = require('plexer')

export class FullDuplexStream {
public static CreatePair(): { first: Duplex, second: Duplex } {
Expand All @@ -11,20 +12,6 @@ export class FullDuplexStream {
}

public static Splice(readable: NodeJS.ReadableStream, writable: NodeJS.WritableStream): Duplex {
const duplex = new Duplex({
write(chunk, encoding, callback) {
writable.write(chunk, encoding, callback);
},

final(callback) {
writable.end(callback);
},
});

// All reads and events come directly from the readable stream.
duplex.read = readable.read.bind(readable);
duplex.on = readable.on.bind(readable) as any;

return duplex;
return duplexer(writable, readable)
}
}
10 changes: 8 additions & 2 deletions src/nerdbank-streams/src/MultiplexingStreamFormatters.ts
Original file line number Diff line number Diff line change
Expand Up @@ -299,11 +299,17 @@ export class MultiplexingStreamV2Formatter extends MultiplexingStreamFormatter {
const readObject = this.reader.read();
if (readObject === null) {
const bytesAvailable = new Deferred<void>();
this.reader.once("readable", bytesAvailable.resolve.bind(bytesAvailable));
this.reader.once("end", streamEnded.resolve.bind(streamEnded));
const bytesAvailableCallback = bytesAvailable.resolve.bind(bytesAvailable);
const streamEndedCallback = streamEnded.resolve.bind(streamEnded);

this.reader.once("readable", bytesAvailableCallback);
this.reader.once("end", streamEndedCallback);
const endPromise = Promise.race([bytesAvailable.promise, streamEnded.promise]);
await (cancellationToken ? cancellationToken.racePromise(endPromise) : endPromise);

this.reader.removeListener("readable", bytesAvailableCallback);
this.reader.removeListener("end", streamEndedCallback);

if (bytesAvailable.isCompleted) {
continue;
}
Expand Down
158 changes: 128 additions & 30 deletions src/nerdbank-streams/src/Utilities.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import CancellationToken from "cancellationtoken";
import { Readable, Writable } from "stream";
import { Deferred } from "./Deferred";
import { IDisposableObservable } from "./IDisposableObservable";

export async function writeAsync(stream: NodeJS.WritableStream, chunk: any) {
Expand Down Expand Up @@ -35,20 +34,118 @@ export function writeSubstream(stream: NodeJS.WritableStream): NodeJS.WritableSt
});
}

export function readSubstream(stream: NodeJS.ReadableStream): NodeJS.ReadableStream {
/**
 * Reads the next chunk from a stream, asynchronously waiting for more to be read if necessary.
 * The stream must be in paused (non-flowing) mode; it is temporarily resumed while waiting
 * and paused again before the returned promise settles.
 * @param stream The stream to read from.
 * @param cancellationToken A token whose cancellation will result in immediate rejection of the previously returned promise.
 * @returns The result of reading from the stream. This will be null if the end of the stream is reached before any more can be read.
 */
export function readAsync(stream: NodeJS.ReadableStream, cancellationToken?: CancellationToken): Promise<string | Buffer | null> {
    // Guard: an actively flowing stream emits 'data' events on its own schedule,
    // which would race with the once-only listeners installed below.
    if (!(stream.isPaused() || (stream as Readable).readableFlowing !== true)) {
        throw new Error('Stream must not be in flowing mode.');
    }

    // Fast path: data is already buffered, so no listeners or resume are needed.
    const result = stream.read()
    if (result) {
        return Promise.resolve(result)
    }

    return new Promise<string | Buffer | null>((resolve, reject) => {
        // Reject promptly if the token is cancelled while we wait.
        // NOTE(review): assumes onCancelled returns a deregistration function — confirm against the cancellationtoken package.
        const ctReg = cancellationToken?.onCancelled(reason => {
            cleanup();
            reject(reason);
        });
        stream.once('data', onData);
        stream.once('error', onError);
        stream.once('end', onEnd);
        // Resume so a single 'data' event can fire; cleanup() pauses the stream again.
        stream.resume()

        function onData(chunk) {
            cleanup();
            resolve(chunk);
        }

        function onError(...args) {
            cleanup();
            reject(...args);
        }

        function onEnd() {
            cleanup();
            resolve(null);
        }

        // Detach every listener and return the stream to paused mode so that no
        // further events are consumed after this promise settles.
        function cleanup() {
            stream.pause();
            stream.off('data', onData);
            stream.off('error', onError);
            stream.off('end', onEnd);
            if (ctReg) {
                ctReg();
            }
        }
    })
}

/**
* Returns a readable stream that will read just a slice of some existing stream.
* @param stream The stream to read from.
* @param length The maximum number of bytes to read from the stream.
* @returns A stream that will read up to the given number of elements, leaving the rest in the underlying stream.
*/
export function sliceStream(stream: NodeJS.ReadableStream, length: number): Readable {
return new Readable({
async read(_: number) {
const lenBuffer = await getBufferFrom(stream, 4);
const dv = new DataView(lenBuffer.buffer, lenBuffer.byteOffset, lenBuffer.length);
const chunkSize = dv.getUint32(0, false);
if (chunkSize === 0) {
if (length > 0) {
const chunk = stream.read() ?? await readAsync(stream);
if (!chunk) {
// We've reached the end of the source stream.
this.push(null);
return;
}

const countToConsume = Math.min(length, chunk.length)
length -= countToConsume
stream.unshift(chunk.slice(countToConsume))
if (this.push(chunk.slice(0, countToConsume)) && length === 0) {
// Save another call later by informing immediately that we're at the end of the stream.
this.push(null);
}
} else {
this.push(null);
return;
}
},
});
}

// TODO: make this *stream* instead of read as an atomic chunk.
const payload = await getBufferFrom(stream, chunkSize);
this.push(payload);
/**
 * Returns a readable stream over one substream embedded in the given stream.
 * Each chunk of the substream is prefixed with a 4-byte big-endian length header;
 * a zero-length header marks the end of the substream, leaving the rest of the
 * underlying stream unread.
 * @param stream The stream containing the length-prefixed substream.
 * @returns A readable stream that yields only the substream's payload bytes.
 */
export function readSubstream(stream: NodeJS.ReadableStream): NodeJS.ReadableStream {
    // The slice currently being drained, or null when the next length header must be read.
    let currentSlice: Readable | null = null
    return new Readable({
        async read(_: number) {
            while (true) {
                if (currentSlice === null) {
                    // Read the 4-byte big-endian length header for the next chunk.
                    const lenBuffer = await getBufferFrom(stream, 4);
                    const dv = new DataView(lenBuffer.buffer, lenBuffer.byteOffset, lenBuffer.length);
                    const length = dv.getUint32(0, false);
                    if (length === 0) {
                        // We've reached the end of the substream.
                        this.push(null);
                        return;
                    }

                    currentSlice = sliceStream(stream, length)
                }

                const chunk = await readAsync(currentSlice);
                if (!chunk) {
                    // We've reached the end of this chunk. We'll have to read the next header.
                    currentSlice = null;
                    continue;
                }

                this.push(chunk);
                return;
            }
        },
    });
}
Expand All @@ -71,33 +168,34 @@ export async function getBufferFrom(
allowEndOfStream: boolean = false,
cancellationToken?: CancellationToken): Promise<Buffer | null> {

const streamEnded = new Deferred<void>();
while (size > 0) {
cancellationToken?.throwIfCancelled();

const readBuffer = readable.read(size) as Buffer;
if (readBuffer === null) {
const bytesAvailable = new Deferred<void>();
readable.once("readable", bytesAvailable.resolve.bind(bytesAvailable));
readable.once("end", streamEnded.resolve.bind(streamEnded));
const endPromise = Promise.race([bytesAvailable.promise, streamEnded.promise]);
await (cancellationToken ? cancellationToken.racePromise(endPromise) : endPromise);
if (size === 0) {
return Buffer.alloc(0)
}

if (bytesAvailable.isCompleted) {
continue;
}
}
const initialData = readable.read(size) as Buffer | null;
if (initialData) {
return initialData;
}

if (!allowEndOfStream) {
if (!readBuffer || readBuffer.length < size) {
throw new Error("Stream terminated before required bytes were read.");
let totalBytesRead = 0
const result = Buffer.alloc(size);
const streamSlice = sliceStream(readable, size);
while (totalBytesRead < size) {
const chunk = await readAsync(streamSlice, cancellationToken) as Buffer | null
if (chunk === null) {
// We reached the end prematurely.
if (allowEndOfStream) {
return totalBytesRead === 0 ? null : result.subarray(0, totalBytesRead)
} else {
throw new Error(`End of stream encountered after only ${totalBytesRead} bytes when ${size} were expected.`);
}
}

return readBuffer;
chunk.copy(result, totalBytesRead);
totalBytesRead += chunk.length;
}

return Buffer.from([]);
return result;
}

export function throwIfDisposed(value: IDisposableObservable) {
Expand Down
2 changes: 1 addition & 1 deletion src/nerdbank-streams/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@ export { FullDuplexStream } from "./FullDuplexStream";
export { IDisposableObservable } from "./IDisposableObservable";
export { MultiplexingStream } from "./MultiplexingStream";
export { MultiplexingStreamOptions } from "./MultiplexingStreamOptions";
export { writeSubstream, readSubstream } from "./Utilities";
export { writeSubstream, readSubstream, readAsync, sliceStream } from "./Utilities";
export { QualifiedChannelId, ChannelSource } from "./QualifiedChannelId";
39 changes: 30 additions & 9 deletions src/nerdbank-streams/src/tests/FullDuplexStream.spec.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { PassThrough, Readable, Writable } from "stream";
import { Deferred } from "../Deferred";
import { FullDuplexStream } from "../FullDuplexStream";
import { getBufferFrom } from "../Utilities";
import { getBufferFrom, readAsync } from "../Utilities";
import { delay } from "./Timeout";

describe("FullDuplexStream.CreatePair", () => {

Expand All @@ -23,20 +24,20 @@ describe("FullDuplexStream.CreatePair", () => {
await endPropagatesEndEvent(pair.second, pair.first);
});

it("stream1 write end leads to stream2 finish event", async () => {
it("stream1 write end leads to stream1 finish event", async () => {
const pair = FullDuplexStream.CreatePair();
await endPropagatesFinishEvent(pair.first, pair.second);
await endPropagatesFinishEvent(pair.second, pair.first);
await endRaisesFinishEvent(pair.first);
await endRaisesFinishEvent(pair.second);
});

async function writePropagation(first: Writable, second: Readable): Promise<void> {
first.write("abc");
expect(second.read()).toEqual(Buffer.from("abc"));
expect(await readAsync(second)).toEqual(Buffer.from("abc"));
}

async function endPropagatesFinishEvent(first: Writable, second: Readable): Promise<void> {
async function endRaisesFinishEvent(first: Writable): Promise<void> {
const signal = new Deferred<void>();
second.once("finish", () => {
first.once("finish", () => {
signal.resolve();
});
expect(signal.isCompleted).toBe(false);
Expand All @@ -62,8 +63,8 @@ describe("FullDuplexStream.Splice", () => {
let duplex: NodeJS.ReadWriteStream;

beforeEach(() => {
readable = new PassThrough();
writable = new PassThrough();
readable = new PassThrough({ writableHighWaterMark: 8 });
writable = new PassThrough({ writableHighWaterMark: 8 });
duplex = FullDuplexStream.Splice(readable, writable);
});

Expand All @@ -86,4 +87,24 @@ describe("FullDuplexStream.Splice", () => {
buffer = await getBufferFrom(writable, 1, true);
expect(buffer).toBeNull();
});

it("unshift", async () => {
duplex.unshift(Buffer.from([1, 2, 3]))
const result = duplex.read()
expect(result).toEqual(Buffer.from([1, 2, 3]))
})

it("Read should yield when data is not ready", async () => {
const task = writeToStream(duplex, "abcdefgh", 4);
const buffer = await getBufferFrom(writable, 32);
await task;
expect(buffer.length).toEqual(32);
});

// Writes `message` to the stream `repeat` times, pausing ~2ms between writes so
// that the reader side gets a chance to run between chunks.
async function writeToStream(stream: NodeJS.ReadWriteStream, message: string, repeat: number) {
    while (repeat--) {
        stream.write(message);
        await delay(2);
    }
}
});
22 changes: 2 additions & 20 deletions src/nerdbank-streams/src/tests/MultiplexingStream.Interop.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import { Deferred } from "../Deferred";
import { FullDuplexStream } from "../FullDuplexStream";
import { MultiplexingStream } from "../MultiplexingStream";
import { ChannelOptions } from "../ChannelOptions";
import { readAsync } from "../Utilities";

[1, 2, 3].forEach(protocolMajorVersion => {
describe(`MultiplexingStream v${protocolMajorVersion} (interop) `, () => {
Expand Down Expand Up @@ -101,30 +102,11 @@ import { ChannelOptions } from "../ChannelOptions";
return deferred.promise;
}

async function readAsync(readable: NodeJS.ReadableStream): Promise<Buffer | null> {
let readBuffer = readable.read() as Buffer;

if (readBuffer === null) {
const bytesAvailable = new Deferred<void>();
const streamEnded = new Deferred<void>();
readable.once("readable", bytesAvailable.resolve.bind(bytesAvailable));
readable.once("end", streamEnded.resolve.bind(streamEnded));
await Promise.race([bytesAvailable.promise, streamEnded.promise]);
if (bytesAvailable.isCompleted) {
readBuffer = readable.read() as Buffer;
} else {
return null;
}
}

return readBuffer;
}

async function readLineAsync(readable: NodeJS.ReadableStream): Promise<string | null> {
const buffers: Buffer[] = [];

while (true) {
const segment = await readAsync(readable);
const segment = await readAsync(readable) as Buffer | null;
if (segment === null) {
break;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,9 +79,13 @@ async function readAsync(readable: NodeJS.ReadableStream): Promise<Buffer | null
if (readBuffer === null) {
const bytesAvailable = new Deferred<void>();
const streamEnded = new Deferred<void>();
readable.once("readable", bytesAvailable.resolve.bind(bytesAvailable));
readable.once("end", streamEnded.resolve.bind(streamEnded));
const bytesAvailableCallback = bytesAvailable.resolve.bind(bytesAvailable);
const streamEndedCallback = streamEnded.resolve.bind(streamEnded);
readable.once("readable", bytesAvailableCallback);
readable.once("end", streamEndedCallback);
await Promise.race([bytesAvailable.promise, streamEnded.promise]);
readable.removeListener("readable", bytesAvailableCallback);
readable.removeListener("end", streamEndedCallback);
lifengl marked this conversation as resolved.
Show resolved Hide resolved
if (bytesAvailable.isCompleted) {
readBuffer = readable.read() as Buffer;
} else {
Expand Down
Loading