From 4dea76e0499563725d5ff0e0f9a46216b7230e6f Mon Sep 17 00:00:00 2001
From: Antoine du Hamel
Date: Sat, 24 Jan 2026 12:04:11 +0100
Subject: [PATCH] Revert "inspector: fix compressed responses"

This reverts commit 186c7a9c74eaeb5742792d00fc098ecf2e971ed9.
---
 lib/internal/inspector/network.js              |  30 --
 lib/internal/inspector/network_http.js         |  83 ++----
 lib/internal/inspector/network_http2.js        |  84 ++----
 .../test-inspector-network-http-compressed.js  | 259 ----------------
 ...test-inspector-network-http2-compressed.js  | 282 ------------------
 5 files changed, 35 insertions(+), 703 deletions(-)
 delete mode 100644 test/parallel/test-inspector-network-http-compressed.js
 delete mode 100644 test/parallel/test-inspector-network-http2-compressed.js

diff --git a/lib/internal/inspector/network.js b/lib/internal/inspector/network.js
index 8fc2f2cfcf9bfd..ddd5b4750bee8d 100644
--- a/lib/internal/inspector/network.js
+++ b/lib/internal/inspector/network.js
@@ -10,15 +10,8 @@ const {
 const dc = require('diagnostics_channel');
 const { now } = require('internal/perf/utils');
 const { MIMEType } = require('internal/mime');
-const {
-  createGunzip,
-  createInflate,
-  createBrotliDecompress,
-  createZstdDecompress,
-} = require('zlib');
 
 const kInspectorRequestId = Symbol('kInspectorRequestId');
-const kContentEncoding = Symbol('kContentEncoding');
 
 // https://chromedevtools.github.io/devtools-protocol/1-3/Network/#type-ResourceType
 const kResourceType = {
@@ -77,27 +70,6 @@ function sniffMimeType(contentType) {
   };
 }
 
-/**
- * Creates a decompression stream based on the content encoding.
- * @param {string} encoding - The content encoding (e.g., 'gzip', 'deflate', 'br', 'zstd').
- * @returns {import('stream').Transform|null} - A decompression stream or null if encoding is not supported.
- */
-function createDecompressor(encoding) {
-  switch (encoding) {
-    case 'gzip':
-    case 'x-gzip':
-      return createGunzip();
-    case 'deflate':
-      return createInflate();
-    case 'br':
-      return createBrotliDecompress();
-    case 'zstd':
-      return createZstdDecompress();
-    default:
-      return null;
-  }
-}
-
 function registerDiagnosticChannels(listenerPairs) {
   function enable() {
     ArrayPrototypeForEach(listenerPairs, ({ 0: channel, 1: listener }) => {
@@ -119,11 +91,9 @@ function registerDiagnosticChannels(listenerPairs) {
 
 module.exports = {
   kInspectorRequestId,
-  kContentEncoding,
   kResourceType,
   getMonotonicTime,
   getNextRequestId,
   registerDiagnosticChannels,
   sniffMimeType,
-  createDecompressor,
 };
diff --git a/lib/internal/inspector/network_http.js b/lib/internal/inspector/network_http.js
index e530ee8d7e0f13..8d324c8c544eea 100644
--- a/lib/internal/inspector/network_http.js
+++ b/lib/internal/inspector/network_http.js
@@ -10,13 +10,11 @@ const {
 
 const {
   kInspectorRequestId,
-  kContentEncoding,
   kResourceType,
   getMonotonicTime,
   getNextRequestId,
   registerDiagnosticChannels,
   sniffMimeType,
-  createDecompressor,
 } = require('internal/inspector/network');
 const { Network } = require('inspector');
 const EventEmitter = require('events');
@@ -29,7 +27,6 @@ const convertHeaderObject = (headers = {}) => {
   let host;
   let charset;
   let mimeType;
-  let contentEncoding;
   const dict = {};
   for (const { 0: key, 1: value } of ObjectEntries(headers)) {
     const lowerCasedKey = key.toLowerCase();
@@ -41,9 +38,6 @@ const convertHeaderObject = (headers = {}) => {
       charset = result.charset;
       mimeType = result.mimeType;
     }
-    if (lowerCasedKey === 'content-encoding') {
-      contentEncoding = typeof value === 'string' ? value.toLowerCase() : undefined;
-    }
     if (typeof value === 'string') {
       dict[key] = value;
     } else if (ArrayIsArray(value)) {
@@ -56,7 +50,7 @@ const convertHeaderObject = (headers = {}) => {
       dict[key] = String(value);
     }
   }
-  return [dict, host, charset, mimeType, contentEncoding];
+  return [dict, host, charset, mimeType];
 };
 
 /**
@@ -111,10 +105,7 @@ function onClientResponseFinish({ request, response }) {
     return;
   }
 
-  const { 0: headers, 2: charset, 3: mimeType, 4: contentEncoding } = convertHeaderObject(response.headers);
-
-  // Store content encoding on the request for later use
-  request[kContentEncoding] = contentEncoding;
+  const { 0: headers, 2: charset, 3: mimeType } = convertHeaderObject(response.headers);
 
   Network.responseReceived({
     requestId: request[kInspectorRequestId],
@@ -130,64 +121,24 @@ function onClientResponseFinish({ request, response }) {
     },
   });
 
-  // Create a decompressor if the response is compressed
-  const decompressor = createDecompressor(contentEncoding);
-
-  if (decompressor) {
-    // Pipe decompressed data to DevTools
-    decompressor.on('data', (decompressedChunk) => {
-      Network.dataReceived({
-        requestId: request[kInspectorRequestId],
-        timestamp: getMonotonicTime(),
-        dataLength: decompressedChunk.byteLength,
-        encodedDataLength: decompressedChunk.byteLength,
-        data: decompressedChunk,
-      });
-    });
-
-    // Handle decompression errors gracefully - fall back to raw data
-    decompressor.on('error', () => {
-      // If decompression fails, the raw data has already been sent via the fallback
-    });
-
-    // Unlike response.on('data', ...), this does not put the stream into flowing mode.
-    EventEmitter.prototype.on.call(response, 'data', (chunk) => {
-      // Feed the chunk into the decompressor
-      decompressor.write(chunk);
-    });
-
-    // Wait until the response body is consumed by user code.
-    response.once('end', () => {
-      // End the decompressor stream
-      decompressor.end();
-      decompressor.once('end', () => {
-        Network.loadingFinished({
-          requestId: request[kInspectorRequestId],
-          timestamp: getMonotonicTime(),
-        });
-      });
-    });
-  } else {
-    // No decompression needed, send data directly
-    // Unlike response.on('data', ...), this does not put the stream into flowing mode.
-    EventEmitter.prototype.on.call(response, 'data', (chunk) => {
-      Network.dataReceived({
-        requestId: request[kInspectorRequestId],
-        timestamp: getMonotonicTime(),
-        dataLength: chunk.byteLength,
-        encodedDataLength: chunk.byteLength,
-        data: chunk,
-      });
+  // Unlike response.on('data', ...), this does not put the stream into flowing mode.
+  EventEmitter.prototype.on.call(response, 'data', (chunk) => {
+    Network.dataReceived({
+      requestId: request[kInspectorRequestId],
+      timestamp: getMonotonicTime(),
+      dataLength: chunk.byteLength,
+      encodedDataLength: chunk.byteLength,
+      data: chunk,
     });
+  });
 
-    // Wait until the response body is consumed by user code.
-    response.once('end', () => {
-      Network.loadingFinished({
-        requestId: request[kInspectorRequestId],
-        timestamp: getMonotonicTime(),
-      });
+  // Wait until the response body is consumed by user code.
+  response.once('end', () => {
+    Network.loadingFinished({
+      requestId: request[kInspectorRequestId],
+      timestamp: getMonotonicTime(),
     });
-  }
+  });
 }
 
 module.exports = registerDiagnosticChannels([
diff --git a/lib/internal/inspector/network_http2.js b/lib/internal/inspector/network_http2.js
index 3e7352df51233c..0f0751c44dd1f9 100644
--- a/lib/internal/inspector/network_http2.js
+++ b/lib/internal/inspector/network_http2.js
@@ -10,18 +10,15 @@ const {
 
 const {
   kInspectorRequestId,
-  kContentEncoding,
   kResourceType,
   getMonotonicTime,
   getNextRequestId,
   registerDiagnosticChannels,
   sniffMimeType,
-  createDecompressor,
 } = require('internal/inspector/network');
 const { Network } = require('inspector');
 const {
   HTTP2_HEADER_AUTHORITY,
-  HTTP2_HEADER_CONTENT_ENCODING,
   HTTP2_HEADER_CONTENT_TYPE,
   HTTP2_HEADER_COOKIE,
   HTTP2_HEADER_METHOD,
@@ -45,7 +42,6 @@ function convertHeaderObject(headers = {}) {
   let statusCode;
   let charset;
   let mimeType;
-  let contentEncoding;
   const dict = {};
 
   for (const { 0: key, 1: value } of ObjectEntries(headers)) {
@@ -65,8 +61,6 @@ function convertHeaderObject(headers = {}) {
       const result = sniffMimeType(value);
       charset = result.charset;
       mimeType = result.mimeType;
-    } else if (lowerCasedKey === HTTP2_HEADER_CONTENT_ENCODING) {
-      contentEncoding = typeof value === 'string' ? value.toLowerCase() : undefined;
     }
 
     if (typeof value === 'string') {
@@ -84,7 +78,7 @@ function convertHeaderObject(headers = {}) {
 
   const url = `${scheme}://${authority}${path}`;
 
-  return [dict, url, method, statusCode, charset, mimeType, contentEncoding];
+  return [dict, url, method, statusCode, charset, mimeType];
 }
 
 /**
@@ -200,16 +194,7 @@ function onClientStreamFinish({ stream, headers }) {
     return;
   }
 
-  const {
-    0: convertedHeaderObject,
-    3: statusCode,
-    4: charset,
-    5: mimeType,
-    6: contentEncoding,
-  } = convertHeaderObject(headers);
-
-  // Store content encoding on the stream for later use
-  stream[kContentEncoding] = contentEncoding;
+  const { 0: convertedHeaderObject, 3: statusCode, 4: charset, 5: mimeType } = convertHeaderObject(headers);
 
   Network.responseReceived({
     requestId: stream[kInspectorRequestId],
@@ -225,56 +210,23 @@ function onClientStreamFinish({ stream, headers }) {
     },
   });
 
-  // Create a decompressor if the response is compressed
-  const decompressor = createDecompressor(contentEncoding);
-
-  if (decompressor) {
-    // Pipe decompressed data to DevTools
-    decompressor.on('data', (decompressedChunk) => {
-      Network.dataReceived({
-        requestId: stream[kInspectorRequestId],
-        timestamp: getMonotonicTime(),
-        dataLength: decompressedChunk.byteLength,
-        encodedDataLength: decompressedChunk.byteLength,
-        data: decompressedChunk,
-      });
-    });
-
-    // Handle decompression errors gracefully
-    decompressor.on('error', () => {
-      // If decompression fails, the raw data has already been sent via the fallback
+  // Unlike stream.on('data', ...), this does not put the stream into flowing mode.
+  EventEmitter.prototype.on.call(stream, 'data', (chunk) => {
+    /**
+     * When a chunk of the response body has been received, cache it until `getResponseBody` request
+     * https://chromedevtools.github.io/devtools-protocol/1-3/Network/#method-getResponseBody or
+     * stream it with `streamResourceContent` request.
+     * https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-streamResourceContent
+     */
+
+    Network.dataReceived({
+      requestId: stream[kInspectorRequestId],
+      timestamp: getMonotonicTime(),
+      dataLength: chunk.byteLength,
+      encodedDataLength: chunk.byteLength,
+      data: chunk,
     });
-
-    // Unlike stream.on('data', ...), this does not put the stream into flowing mode.
-    EventEmitter.prototype.on.call(stream, 'data', (chunk) => {
-      // Feed the chunk into the decompressor
-      decompressor.write(chunk);
-    });
-
-    // End the decompressor when the stream closes
-    stream.once('end', () => {
-      decompressor.end();
-    });
-  } else {
-    // No decompression needed, send data directly
-    // Unlike stream.on('data', ...), this does not put the stream into flowing mode.
-    EventEmitter.prototype.on.call(stream, 'data', (chunk) => {
-      /**
-       * When a chunk of the response body has been received, cache it until `getResponseBody` request
-       * https://chromedevtools.github.io/devtools-protocol/1-3/Network/#method-getResponseBody or
-       * stream it with `streamResourceContent` request.
-       * https://chromedevtools.github.io/devtools-protocol/tot/Network/#method-streamResourceContent
-       */
-
-      Network.dataReceived({
-        requestId: stream[kInspectorRequestId],
-        timestamp: getMonotonicTime(),
-        dataLength: chunk.byteLength,
-        encodedDataLength: chunk.byteLength,
-        data: chunk,
-      });
-    });
-  }
+  });
 }
 
 /**
diff --git a/test/parallel/test-inspector-network-http-compressed.js b/test/parallel/test-inspector-network-http-compressed.js
deleted file mode 100644
index 7c305694133c68..00000000000000
--- a/test/parallel/test-inspector-network-http-compressed.js
+++ /dev/null
@@ -1,259 +0,0 @@
-// Flags: --inspect=0 --experimental-network-inspection
-'use strict';
-const common = require('../common');
-
-common.skipIfInspectorDisabled();
-
-const assert = require('node:assert');
-const { once } = require('node:events');
-const fixtures = require('../common/fixtures');
-const http = require('node:http');
-const https = require('node:https');
-const zlib = require('node:zlib');
-const inspector = require('node:inspector/promises');
-
-const session = new inspector.Session();
-session.connect();
-
-const plainTextBody = 'hello world compressed\n';
-
-const setResponseHeaders = (res, encoding) => {
-  res.setHeader('server', 'node');
-  res.setHeader('Content-Type', 'text/plain; charset=utf-8');
-  if (encoding) {
-    res.setHeader('Content-Encoding', encoding);
-  }
-};
-
-const handleRequest = (req, res) => {
-  const path = req.url;
-  switch (path) {
-    case '/gzip':
-      setResponseHeaders(res, 'gzip');
-      res.writeHead(200);
-      zlib.gzip(plainTextBody, common.mustSucceed((compressed) => {
-        res.end(compressed);
-      }));
-      break;
-    case '/x-gzip':
-      setResponseHeaders(res, 'x-gzip');
-      res.writeHead(200);
-      zlib.gzip(plainTextBody, common.mustSucceed((compressed) => {
-        res.end(compressed);
-      }));
-      break;
-    case '/deflate':
-      setResponseHeaders(res, 'deflate');
-      res.writeHead(200);
-      zlib.deflate(plainTextBody, common.mustSucceed((compressed) => {
-        res.end(compressed);
-      }));
-      break;
-    case '/br':
-      setResponseHeaders(res, 'br');
-      res.writeHead(200);
-      zlib.brotliCompress(plainTextBody, common.mustSucceed((compressed) => {
-        res.end(compressed);
-      }));
-      break;
-    case '/zstd':
-      setResponseHeaders(res, 'zstd');
-      res.writeHead(200);
-      zlib.zstdCompress(plainTextBody, common.mustSucceed((compressed) => {
-        res.end(compressed);
-      }));
-      break;
-    case '/plain':
-      setResponseHeaders(res);
-      res.writeHead(200);
-      res.end(plainTextBody);
-      break;
-    case '/invalid-gzip':
-      // Send invalid data with gzip content-encoding to trigger decompression error
-      setResponseHeaders(res, 'gzip');
-      res.writeHead(200);
-      res.end('this is not valid gzip data');
-      break;
-    default:
-      assert.fail(`Unexpected path: ${path}`);
-  }
-};
-
-const httpServer = http.createServer(handleRequest);
-
-const httpsServer = https.createServer({
-  key: fixtures.readKey('agent1-key.pem'),
-  cert: fixtures.readKey('agent1-cert.pem')
-}, handleRequest);
-
-const terminate = () => {
-  session.disconnect();
-  httpServer.close();
-  httpsServer.close();
-  inspector.close();
-};
-
-function verifyResponseReceived({ method, params }, expect) {
-  assert.strictEqual(method, 'Network.responseReceived');
-  assert.ok(params.requestId.startsWith('node-network-event-'));
-  assert.strictEqual(params.response.status, 200);
-  assert.strictEqual(params.response.url, expect.url);
-  assert.strictEqual(params.response.mimeType, 'text/plain');
-  assert.strictEqual(params.response.charset, 'utf-8');
-  return params;
-}
-
-function verifyLoadingFinished({ method, params }) {
-  assert.strictEqual(method, 'Network.loadingFinished');
-  assert.ok(params.requestId.startsWith('node-network-event-'));
-  return params;
-}
-
-async function testInvalidCompressedResponse(server) {
-  const port = server.address().port;
-  const protocol = server === httpsServer ? 'https' : 'http';
-  const path = '/invalid-gzip';
-  const url = `${protocol}://127.0.0.1:${port}${path}`;
-
-  const responseReceivedFuture = once(session, 'Network.responseReceived')
-    .then(([event]) => verifyResponseReceived(event, { url }));
-
-  const client = protocol === 'https' ? https : http;
-
-  await new Promise((resolve) => {
-    const req = client.get({
-      host: '127.0.0.1',
-      port,
-      path,
-      rejectUnauthorized: false,
-    }, (res) => {
-      // Consume the response to trigger the decompression error in inspector
-      res.on('data', () => {});
-      res.on('end', resolve);
-    });
-    req.on('error', resolve);
-  });
-
-  await responseReceivedFuture;
-  // Note: loadingFinished is not emitted when decompression fails,
-  // but this test ensures the error handler is triggered for coverage.
-}
-
-async function testCompressedResponse(server, encoding, path) {
-  const port = server.address().port;
-  const protocol = server === httpsServer ? 'https' : 'http';
-  const url = `${protocol}://127.0.0.1:${port}${path}`;
-
-  const responseReceivedFuture = once(session, 'Network.responseReceived')
-    .then(([event]) => verifyResponseReceived(event, { url }));
-
-  const loadingFinishedFuture = once(session, 'Network.loadingFinished')
-    .then(([event]) => verifyLoadingFinished(event));
-
-  const client = protocol === 'https' ? https : http;
-  const chunks = [];
-
-  await new Promise((resolve, reject) => {
-    const req = client.get({
-      host: '127.0.0.1',
-      port,
-      path,
-      rejectUnauthorized: false,
-    }, (res) => {
-      // Manually decompress the response to verify it works for user code
-      let decompressor;
-      if (encoding === 'gzip' || encoding === 'x-gzip') {
-        decompressor = zlib.createGunzip();
-      } else if (encoding === 'deflate') {
-        decompressor = zlib.createInflate();
-      } else if (encoding === 'br') {
-        decompressor = zlib.createBrotliDecompress();
-      } else if (encoding === 'zstd') {
-        decompressor = zlib.createZstdDecompress();
-      }
-
-      if (decompressor) {
-        res.pipe(decompressor);
-        decompressor.on('data', (chunk) => chunks.push(chunk));
-        decompressor.on('end', resolve);
-        decompressor.on('error', reject);
-      } else {
-        res.on('data', (chunk) => chunks.push(chunk));
-        res.on('end', resolve);
-      }
-    });
-    req.on('error', reject);
-  });
-
-  // Verify user code can read the decompressed response
-  const body = Buffer.concat(chunks).toString();
-  assert.strictEqual(body, plainTextBody);
-
-  const responseReceived = await responseReceivedFuture;
-  await loadingFinishedFuture;
-
-  // Verify the inspector receives the decompressed response body
-  const responseBody = await session.post('Network.getResponseBody', {
-    requestId: responseReceived.requestId,
-  });
-  assert.strictEqual(responseBody.base64Encoded, false);
-  assert.strictEqual(responseBody.body, plainTextBody);
-}
-
-const testNetworkInspection = async () => {
-  // Test gzip
-  await testCompressedResponse(httpServer, 'gzip', '/gzip');
-  session.removeAllListeners();
-  await testCompressedResponse(httpsServer, 'gzip', '/gzip');
-  session.removeAllListeners();
-
-  // Test x-gzip (alternate gzip encoding)
-  await testCompressedResponse(httpServer, 'x-gzip', '/x-gzip');
-  session.removeAllListeners();
-  await testCompressedResponse(httpsServer, 'x-gzip', '/x-gzip');
-  session.removeAllListeners();
-
-  // Test deflate
-  await testCompressedResponse(httpServer, 'deflate', '/deflate');
-  session.removeAllListeners();
-  await testCompressedResponse(httpsServer, 'deflate', '/deflate');
-  session.removeAllListeners();
-
-  // Test brotli
-  await testCompressedResponse(httpServer, 'br', '/br');
-  session.removeAllListeners();
-  await testCompressedResponse(httpsServer, 'br', '/br');
-  session.removeAllListeners();
-
-  // Test zstd
-  await testCompressedResponse(httpServer, 'zstd', '/zstd');
-  session.removeAllListeners();
-  await testCompressedResponse(httpsServer, 'zstd', '/zstd');
-  session.removeAllListeners();
-
-  // Test plain (no compression)
-  await testCompressedResponse(httpServer, null, '/plain');
-  session.removeAllListeners();
-  await testCompressedResponse(httpsServer, null, '/plain');
-  session.removeAllListeners();
-
-  // Test invalid compressed data (triggers decompression error handler)
-  await testInvalidCompressedResponse(httpServer);
-  session.removeAllListeners();
-  await testInvalidCompressedResponse(httpsServer);
-  session.removeAllListeners();
-};
-
-httpServer.listen(0, () => {
-  httpsServer.listen(0, async () => {
-    try {
-      await session.post('Network.enable');
-      await testNetworkInspection();
-      await session.post('Network.disable');
-    } catch (e) {
-      assert.fail(e);
-    } finally {
-      terminate();
-    }
-  });
-});
diff --git a/test/parallel/test-inspector-network-http2-compressed.js b/test/parallel/test-inspector-network-http2-compressed.js
deleted file mode 100644
index 001f2a0decef87..00000000000000
--- a/test/parallel/test-inspector-network-http2-compressed.js
+++ /dev/null
@@ -1,282 +0,0 @@
-// Flags: --inspect=0 --experimental-network-inspection
-'use strict';
-const common = require('../common');
-if (!common.hasCrypto)
-  common.skip('missing crypto');
-common.skipIfInspectorDisabled();
-
-const assert = require('node:assert');
-const { once } = require('node:events');
-const fixtures = require('../common/fixtures');
-const http2 = require('node:http2');
-const zlib = require('node:zlib');
-const inspector = require('node:inspector/promises');
-
-const session = new inspector.Session();
-session.connect();
-
-const plainTextBody = 'hello world compressed http2\n';
-
-const handleStream = common.mustCallAtLeast((stream, headers) => {
-  const path = headers[http2.constants.HTTP2_HEADER_PATH];
-
-  const responseHeaders = {
-    [http2.constants.HTTP2_HEADER_STATUS]: 200,
-    [http2.constants.HTTP2_HEADER_CONTENT_TYPE]: 'text/plain; charset=utf-8',
-  };
-
-  switch (path) {
-    case '/gzip':
-      responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'gzip';
-      stream.respond(responseHeaders);
-      zlib.gzip(plainTextBody, common.mustSucceed((compressed) => {
-        stream.end(compressed);
-      }));
-      break;
-    case '/x-gzip':
-      responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'x-gzip';
-      stream.respond(responseHeaders);
-      zlib.gzip(plainTextBody, common.mustSucceed((compressed) => {
-        stream.end(compressed);
-      }));
-      break;
-    case '/deflate':
-      responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'deflate';
-      stream.respond(responseHeaders);
-      zlib.deflate(plainTextBody, common.mustSucceed((compressed) => {
-        stream.end(compressed);
-      }));
-      break;
-    case '/br':
-      responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'br';
-      stream.respond(responseHeaders);
-      zlib.brotliCompress(plainTextBody, common.mustSucceed((compressed) => {
-        stream.end(compressed);
-      }));
-      break;
-    case '/zstd':
-      responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'zstd';
-      stream.respond(responseHeaders);
-      zlib.zstdCompress(plainTextBody, common.mustSucceed((compressed) => {
-        stream.end(compressed);
-      }));
-      break;
-    case '/plain':
-      stream.respond(responseHeaders);
-      stream.end(plainTextBody);
-      break;
-    case '/invalid-gzip':
-      // Send invalid data with gzip content-encoding to trigger decompression error
-      responseHeaders[http2.constants.HTTP2_HEADER_CONTENT_ENCODING] = 'gzip';
-      stream.respond(responseHeaders);
-      stream.end('this is not valid gzip data');
-      break;
-    default:
-      assert.fail(`Unexpected path: ${path}`);
-  }
-});
-
-const http2Server = http2.createServer();
-const http2SecureServer = http2.createSecureServer({
-  key: fixtures.readKey('agent1-key.pem'),
-  cert: fixtures.readKey('agent1-cert.pem'),
-});
-
-http2Server.on('stream', handleStream);
-http2SecureServer.on('stream', handleStream);
-
-const terminate = () => {
-  session.disconnect();
-  http2Server.close();
-  http2SecureServer.close();
-  inspector.close();
-};
-
-function verifyResponseReceived({ method, params }, expect) {
-  assert.strictEqual(method, 'Network.responseReceived');
-  assert.ok(params.requestId.startsWith('node-network-event-'));
-  assert.strictEqual(params.response.status, 200);
-  assert.strictEqual(params.response.url, expect.url);
-  assert.strictEqual(params.response.mimeType, 'text/plain');
-  assert.strictEqual(params.response.charset, 'utf-8');
-  return params;
-}
-
-function verifyLoadingFinished({ method, params }) {
-  assert.strictEqual(method, 'Network.loadingFinished');
-  assert.ok(params.requestId.startsWith('node-network-event-'));
-  return params;
-}
-
-async function testInvalidCompressedResponse(server) {
-  const port = server.address().port;
-  const secure = server === http2SecureServer;
-  const origin = (secure ? 'https' : 'http') + `://localhost:${port}`;
-  const path = '/invalid-gzip';
-  const url = `${origin}${path}`;
-
-  const responseReceivedFuture = once(session, 'Network.responseReceived')
-    .then(([event]) => verifyResponseReceived(event, { url }));
-
-  await new Promise((resolve) => {
-    const client = http2.connect(origin, {
-      rejectUnauthorized: false,
-    });
-
-    const req = client.request({
-      [http2.constants.HTTP2_HEADER_PATH]: path,
-      [http2.constants.HTTP2_HEADER_METHOD]: 'GET',
-    });
-
-    // Consume the response to trigger the decompression error in inspector
-    req.on('data', () => {});
-    req.on('end', () => {
-      client.close();
-      resolve();
-    });
-    req.on('error', () => {
-      client.close();
-      resolve();
-    });
-    req.end();
-  });
-
-  await responseReceivedFuture;
-  // Note: loadingFinished is not emitted when decompression fails,
-  // but this test ensures the error handler is triggered for coverage.
-}
-
-async function testCompressedResponse(server, encoding, path) {
-  const port = server.address().port;
-  const secure = server === http2SecureServer;
-  const origin = (secure ? 'https' : 'http') + `://localhost:${port}`;
-  const url = `${origin}${path}`;
-
-  const responseReceivedFuture = once(session, 'Network.responseReceived')
-    .then(([event]) => verifyResponseReceived(event, { url }));
-
-  const loadingFinishedFuture = once(session, 'Network.loadingFinished')
-    .then(([event]) => verifyLoadingFinished(event));
-
-  const chunks = [];
-
-  await new Promise((resolve, reject) => {
-    const client = http2.connect(origin, {
-      rejectUnauthorized: false,
-    });
-
-    const req = client.request({
-      [http2.constants.HTTP2_HEADER_PATH]: path,
-      [http2.constants.HTTP2_HEADER_METHOD]: 'GET',
-    });
-
-    // Manually decompress the response to verify it works for user code
-    let decompressor;
-    if (encoding === 'gzip' || encoding === 'x-gzip') {
-      decompressor = zlib.createGunzip();
-    } else if (encoding === 'deflate') {
-      decompressor = zlib.createInflate();
-    } else if (encoding === 'br') {
-      decompressor = zlib.createBrotliDecompress();
-    } else if (encoding === 'zstd') {
-      decompressor = zlib.createZstdDecompress();
-    }
-
-    if (decompressor) {
-      req.pipe(decompressor);
-      decompressor.on('data', (chunk) => chunks.push(chunk));
-      decompressor.on('end', () => {
-        client.close();
-        resolve();
-      });
-      decompressor.on('error', (err) => {
-        client.close();
-        reject(err);
-      });
-    } else {
-      req.on('data', (chunk) => chunks.push(chunk));
-      req.on('end', () => {
-        client.close();
-        resolve();
-      });
-    }
-
-    req.on('error', (err) => {
-      client.close();
-      reject(err);
-    });
-    req.end();
-  });
-
-  // Verify user code can read the decompressed response
-  const body = Buffer.concat(chunks).toString();
-  assert.strictEqual(body, plainTextBody);
-
-  const responseReceived = await responseReceivedFuture;
-  await loadingFinishedFuture;
-
-  // Verify the inspector receives the decompressed response body
-  const responseBody = await session.post('Network.getResponseBody', {
-    requestId: responseReceived.requestId,
-  });
-  assert.strictEqual(responseBody.base64Encoded, false);
-  assert.strictEqual(responseBody.body, plainTextBody);
-}
-
-const testNetworkInspection = async () => {
-  // Test gzip
-  await testCompressedResponse(http2Server, 'gzip', '/gzip');
-  session.removeAllListeners();
-  await testCompressedResponse(http2SecureServer, 'gzip', '/gzip');
-  session.removeAllListeners();
-
-  // Test x-gzip (alternate gzip encoding)
-  await testCompressedResponse(http2Server, 'x-gzip', '/x-gzip');
-  session.removeAllListeners();
-  await testCompressedResponse(http2SecureServer, 'x-gzip', '/x-gzip');
-  session.removeAllListeners();
-
-  // Test deflate
-  await testCompressedResponse(http2Server, 'deflate', '/deflate');
-  session.removeAllListeners();
-  await testCompressedResponse(http2SecureServer, 'deflate', '/deflate');
-  session.removeAllListeners();
-
-  // Test brotli
-  await testCompressedResponse(http2Server, 'br', '/br');
-  session.removeAllListeners();
-  await testCompressedResponse(http2SecureServer, 'br', '/br');
-  session.removeAllListeners();
-
-  // Test zstd
-  await testCompressedResponse(http2Server, 'zstd', '/zstd');
-  session.removeAllListeners();
-  await testCompressedResponse(http2SecureServer, 'zstd', '/zstd');
-  session.removeAllListeners();
-
-  // Test plain (no compression)
-  await testCompressedResponse(http2Server, null, '/plain');
-  session.removeAllListeners();
-  await testCompressedResponse(http2SecureServer, null, '/plain');
-  session.removeAllListeners();
-
-  // Test invalid compressed data (triggers decompression error handler)
-  await testInvalidCompressedResponse(http2Server);
-  session.removeAllListeners();
-  await testInvalidCompressedResponse(http2SecureServer);
-  session.removeAllListeners();
-};
-
-http2Server.listen(0, () => {
-  http2SecureServer.listen(0, async () => {
-    try {
-      await session.post('Network.enable');
-      await testNetworkInspection();
-      await session.post('Network.disable');
-    } catch (e) {
-      assert.fail(e);
-    } finally {
-      terminate();
-    }
-  });
-});