stream: improve Web Compression spec compliance · nodejs/node@4bdcaf2 · GitHub
Skip to content

Commit 4bdcaf2

Browse files
panvaaduh95
authored and committed
stream: improve Web Compression spec compliance
Pass rejectGarbageAfterEnd: true to createInflateRaw() and createBrotliDecompress(), matching the behavior already in place for deflate and gzip. The Compression Streams spec treats any data following a valid compressed payload as an error. When the underlying Node.js stream throws synchronously from write() (e.g. zlib rejects an invalid chunk type), destroy the stream so that the readable side is also errored. Without this, the readable side hangs forever waiting for data that will never arrive. Introduce a kValidateChunk callback option in the webstreams adapter layer. Compression streams use this to validate that chunks are BufferSource instances not backed by SharedArrayBuffer, replacing the previous monkey-patching of the underlying handle's write method. Unskip WPT compression bad-chunks tests which now run instead of hang and mark the remaining expected failures. PR-URL: #62107 Reviewed-By: Yagiz Nizipli <yagiz@nizipli.com> Reviewed-By: Matteo Collina <matteo.collina@gmail.com> Reviewed-By: Mattias Buelens <mattias@buelens.com> Reviewed-By: René <contact.9a5d6388@renegade334.me.uk>
1 parent 82770cb commit 4bdcaf2

6 files changed

Lines changed: 302 additions & 23 deletions

File tree

lib/internal/webstreams/adapters.js

Lines changed: 36 additions & 8 deletions

lib/internal/webstreams/compression.js

Lines changed: 38 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -7,15 +7,28 @@ const {
77

88
const {
99
newReadableWritablePairFromDuplex,
10+
kValidateChunk,
11+
kDestroyOnSyncError,
1012
} = require('internal/webstreams/adapters');
1113

1214
const { customInspect } = require('internal/webstreams/util');
1315

16+
const {
17+
isArrayBufferView,
18+
isSharedArrayBuffer,
19+
} = require('internal/util/types');
20+
1421
const {
1522
customInspectSymbol: kInspect,
1623
kEnumerableProperty,
1724
} = require('internal/util');
1825

26+
const {
27+
codes: {
28+
ERR_INVALID_ARG_TYPE,
29+
},
30+
} = require('internal/errors');
31+
1932
const { createEnumConverter } = require('internal/webidl');
2033

2134
let zlib;
@@ -24,6 +37,18 @@ function lazyZlib() {
2437
return zlib;
2538
}
2639

40+
// Per the Compression Streams spec, chunks must be BufferSource
41+
// (ArrayBuffer or ArrayBufferView not backed by SharedArrayBuffer).
42+
function validateBufferSourceChunk(chunk) {
43+
if (isArrayBufferView(chunk) && isSharedArrayBuffer(chunk.buffer)) {
44+
throw new ERR_INVALID_ARG_TYPE(
45+
'chunk',
46+
['ArrayBuffer', 'Buffer', 'TypedArray', 'DataView'],
47+
chunk,
48+
);
49+
}
50+
}
51+
2752
const formatConverter = createEnumConverter('CompressionFormat', [
2853
'deflate',
2954
'deflate-raw',
@@ -62,7 +87,10 @@ class CompressionStream {
6287
this.#handle = lazyZlib().createBrotliCompress();
6388
break;
6489
}
65-
this.#transform = newReadableWritablePairFromDuplex(this.#handle);
90+
this.#transform = newReadableWritablePairFromDuplex(this.#handle, {
91+
[kValidateChunk]: validateBufferSourceChunk,
92+
[kDestroyOnSyncError]: true,
93+
});
6694
}
6795

6896
/**
@@ -108,25 +136,24 @@ class DecompressionStream {
108136
});
109137
break;
110138
case 'deflate-raw':
111-
this.#handle = lazyZlib().createInflateRaw();
139+
this.#handle = lazyZlib().createInflateRaw({
140+
rejectGarbageAfterEnd: true,
141+
});
112142
break;
113143
case 'gzip':
114144
this.#handle = lazyZlib().createGunzip({
115145
rejectGarbageAfterEnd: true,
116146
});
117147
break;
118148
case 'brotli':
119-
this.#handle = lazyZlib().createBrotliDecompress();
149+
this.#handle = lazyZlib().createBrotliDecompress({
150+
rejectGarbageAfterEnd: true,
151+
});
120152
break;
121153
}
122-
this.#transform = newReadableWritablePairFromDuplex(this.#handle);
123-
124-
this.#handle.on('error', (err) => {
125-
if (this.#transform?.writable &&
126-
!this.#transform.writable.locked &&
127-
typeof this.#transform.writable.abort === 'function') {
128-
this.#transform.writable.abort(err);
129-
}
154+
this.#transform = newReadableWritablePairFromDuplex(this.#handle, {
155+
[kValidateChunk]: validateBufferSourceChunk,
156+
[kDestroyOnSyncError]: true,
130157
});
131158
}
132159

Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
'use strict';
// Flags: --no-warnings --expose-internals
require('../common');
const assert = require('assert');
const test = require('node:test');
const { Duplex, Writable } = require('stream');
const {
  newWritableStreamFromStreamWritable,
  newReadableWritablePairFromDuplex,
} = require('internal/webstreams/adapters');

// Verify that when the underlying Node.js stream throws synchronously from
// write(), the writable web stream properly rejects but does not destroy
// the stream (destroy-on-sync-throw is only used internally by
// CompressionStream/DecompressionStream).

test('WritableStream from Node.js stream handles sync write throw', async () => {
  const error = new TypeError('invalid chunk');
  // Underlying stream whose write() always throws synchronously.
  const writable = new Writable({
    write() {
      throw error;
    },
  });

  const ws = newWritableStreamFromStreamWritable(writable);
  const writer = ws.getWriter();

  // The write promise must reject with the exact error thrown by write().
  await assert.rejects(writer.write('bad'), (err) => {
    assert.strictEqual(err, error);
    return true;
  });

  // Standalone writable should not be destroyed on sync write error
  assert.strictEqual(writable.destroyed, false);
});

test('Duplex-backed pair does NOT destroy on sync write throw', async () => {
  const error = new TypeError('invalid chunk');
  const duplex = new Duplex({
    read() {},
    write() {
      throw error;
    },
  });

  const { writable, readable } = newReadableWritablePairFromDuplex(duplex);
  const writer = writable.getWriter();

  await assert.rejects(writer.write('bad'), (err) => {
    assert.strictEqual(err, error);
    return true;
  });

  // A plain Duplex should NOT be destroyed on sync write error
  assert.strictEqual(duplex.destroyed, false);

  // The readable side should still be usable.
  // Await cancel() so a rejection is not left as a floating promise.
  const reader = readable.getReader();
  await reader.cancel();
});

test('WritableStream from Node.js stream - valid writes still work', async () => {
  const chunks = [];
  const writable = new Writable({
    write(chunk, _encoding, cb) {
      chunks.push(chunk);
      cb();
    },
  });

  const ws = newWritableStreamFromStreamWritable(writable);
  const writer = ws.getWriter();

  await writer.write(Buffer.from('hello'));
  await writer.write(Buffer.from(' world'));
  await writer.close();

  assert.strictEqual(Buffer.concat(chunks).toString(), 'hello world');
});
Lines changed: 57 additions & 0 deletions

0 commit comments

Comments
 (0)