From 1d0efc5b5d0f73ac89f480bf5d0426b3af40bc01 Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Wed, 22 May 2019 18:39:57 +0200
Subject: [PATCH 1/5] test: expect wpt/encoding/streams to fail

Since we do not implement TextDecoderStream or TextEncoderStream.
---
 test/wpt/status/encoding.json | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/test/wpt/status/encoding.json b/test/wpt/status/encoding.json
index 5cf387e554844e..088eed802f0fe7 100644
--- a/test/wpt/status/encoding.json
+++ b/test/wpt/status/encoding.json
@@ -47,5 +47,8 @@
   },
   "unsupported-encodings.any.js": {
     "skip": "decoding-helpers.js needs XMLHttpRequest"
+  },
+  "streams/*.js": {
+    "fail": "No implementation of TextDecoderStream and TextEncoderStream"
   }
 }

From aca5d75397156db0fb4c15ca8104b935c0201f01 Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Wed, 22 May 2019 15:39:22 +0200
Subject: [PATCH 2/5] test: run WPT in subdirectories

For a directory like this:

- wpt
  - encoding
    - streams
      - backpressure.any.js
    - api-basics.any.js

Previously we only ran `api-basics.any.js`; now we also run
`backpressure.any.js` (and any tests in more deeply nested
directories).

This enables us to run more of the WPT suite, since not every module
puts its tests in the top-level directory.
---
 test/common/wpt.js | 78 +++++++++++++++++++++++++++++++++-------------
 1 file changed, 57 insertions(+), 21 deletions(-)

diff --git a/test/common/wpt.js b/test/common/wpt.js
index 8401d101dc881f..8a23e9bed5b65f 100644
--- a/test/common/wpt.js
+++ b/test/common/wpt.js
@@ -41,16 +41,25 @@ class ResourceLoader {
     this.path = path;
   }
 
-  fetch(url, asPromise = true) {
+  /**
+   * Load a resource in test/fixtures/wpt specified with a URL
+   * @param {string} from the path of the file loading this resource,
+   *                      relative to the WPT folder.
+   * @param {string} url the URL of the resource being loaded.
+   * @param {boolean} asFetch if true, return the resource in a
+   *                          pseudo-Response object.
+   */
+  read(from, url, asFetch = true) {
     // We need to patch this to load the WebIDL parser
     url = url.replace(
       '/resources/WebIDLParser.js',
       '/resources/webidl2/lib/webidl2.js'
     );
+    const base = path.dirname(from);
     const file = url.startsWith('/') ?
       fixtures.path('wpt', url) :
-      fixtures.path('wpt', this.path, url);
-    if (asPromise) {
+      fixtures.path('wpt', base, url);
+    if (asFetch) {
       return fsPromises.readFile(file)
         .then((data) => {
           return {
@@ -155,8 +164,12 @@ class WPTTest {
     }
   }
 
+  getRelativePath() {
+    return path.join(this.module, this.filename);
+  }
+
   getAbsolutePath() {
-    return fixtures.path('wpt', this.module, this.filename);
+    return fixtures.path('wpt', this.getRelativePath());
   }
 
   getContent() {
@@ -217,20 +230,41 @@ class StatusLoader {
     this.tests = [];
   }
 
+  /**
+   * Grep for all .*.js files recursively in a directory.
+   * @param {string} dir
+   */
+  grep(dir) {
+    let result = [];
+    const list = fs.readdirSync(dir);
+    for (const file of list) {
+      const filepath = path.join(dir, file);
+      const stat = fs.statSync(filepath);
+      if (stat.isDirectory()) {
+        const list = this.grep(filepath);
+        result = result.concat(list);
+      } else {
+        if (!(/\.\w+\.js$/.test(filepath))) {
+          continue;
+        }
+        result.push(filepath);
+      }
+    }
+    return result;
+  }
+
   load() {
     const dir = path.join(__dirname, '..', 'wpt');
     const statusFile = path.join(dir, 'status', `${this.path}.json`);
     const result = JSON.parse(fs.readFileSync(statusFile, 'utf8'));
     this.rules.addRules(result);
 
-    const list = fs.readdirSync(fixtures.path('wpt', this.path));
-
+    const subDir = fixtures.path('wpt', this.path);
+    const list = this.grep(subDir);
     for (const file of list) {
-      if (!(/\.\w+\.js$/.test(file))) {
-        continue;
-      }
-      const match = this.rules.match(file);
-      this.tests.push(new WPTTest(this.path, file, match));
+      const relativePath = path.relative(subDir, file);
+      const match = this.rules.match(relativePath);
+      this.tests.push(new WPTTest(this.path, relativePath, match));
     }
     this.loaded = true;
   }
@@ -309,8 +343,9 @@ class WPTRunner {
       const meta = test.title = this.getMeta(content);
 
       const absolutePath = test.getAbsolutePath();
-      const context = this.generateContext(test.filename);
-      const code = this.mergeScripts(meta, content);
+      const context = this.generateContext(test);
+      const relativePath = test.getRelativePath();
+      const code = this.mergeScripts(relativePath, meta, content);
       try {
         vm.runInContext(code, context, {
           filename: absolutePath
@@ -327,14 +362,14 @@ class WPTRunner {
     this.tryFinish();
   }
 
-  mock() {
+  mock(testfile) {
     const resource = this.resource;
     const result = {
       // This is a mock, because at the moment fetch is not implemented
       // in Node.js, but some tests and harness depend on this to pull
       // resources.
       fetch(file) {
-        return resource.fetch(file);
+        return resource.read(testfile, file, true);
       },
       GLOBAL: {
         isWindow() { return false; }
@@ -346,16 +381,17 @@ class WPTRunner {
   }
 
   // Note: this is how our global space for the WPT test should look like
-  getSandbox() {
-    const result = this.mock();
+  getSandbox(filename) {
+    const result = this.mock(filename);
     for (const [name, desc] of this.globals) {
       Object.defineProperty(result, name, desc);
     }
     return result;
   }
 
-  generateContext(filename) {
-    const sandbox = this.sandbox = this.getSandbox();
+  generateContext(test) {
+    const filename = test.filename;
+    const sandbox = this.sandbox = this.getSandbox(test.getRelativePath());
     const context = this.context = vm.createContext(sandbox);
 
     const harnessPath = fixtures.path('wpt', 'resources', 'testharness.js');
@@ -509,7 +545,7 @@ class WPTRunner {
     }
   }
 
-  mergeScripts(meta, content) {
+  mergeScripts(base, meta, content) {
     if (!meta.script) {
       return content;
     }
@@ -517,7 +553,7 @@ class WPTRunner {
     // only one script
     let result = '';
    for (const script of meta.script) {
-      result += this.resource.fetch(script, false);
+      result += this.resource.read(base, script, false);
     }
 
     return result + content;

From 45e4391602714d70a83df5e65b14965273aedff1 Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Wed, 22 May 2019 18:44:27 +0200
Subject: [PATCH 3/5] test: update wpt/encoding to 7287608f90

Using `git node wpt encoding`
---
 test/fixtures/wpt/README.md                   |   2 +-
 test/fixtures/wpt/encoding/encodeInto.any.js  | 149 ++++++++++++++++++
 .../encoding/streams/decode-bad-chunks.any.js |  20 ---
 .../wpt/encoding/streams/decode-utf8.any.js   |  20 +++
 .../textdecoder-fatal-single-byte.any.js      |   2 +
 test/fixtures/wpt/versions.json               |   2 +-
 6 files changed, 173 insertions(+), 22 deletions(-)
 create mode 100644 test/fixtures/wpt/encoding/encodeInto.any.js

diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md
index 1865d6698c9faa..802e20d816711c 100644
--- a/test/fixtures/wpt/README.md
+++ b/test/fixtures/wpt/README.md
@@ -11,7 +11,7 @@ See [test/wpt](../../wpt/README.md) for information on how these tests are run.
 Last update:
 
 - console: https://github.com/web-platform-tests/wpt/tree/9786a4b131/console
-- encoding: https://github.com/web-platform-tests/wpt/tree/a093a659ed/encoding
+- encoding: https://github.com/web-platform-tests/wpt/tree/7287608f90/encoding
 - url: https://github.com/web-platform-tests/wpt/tree/418f7fabeb/url
 - resources: https://github.com/web-platform-tests/wpt/tree/e1fddfbf80/resources
 - interfaces: https://github.com/web-platform-tests/wpt/tree/712c9f275e/interfaces
diff --git a/test/fixtures/wpt/encoding/encodeInto.any.js b/test/fixtures/wpt/encoding/encodeInto.any.js
new file mode 100644
index 00000000000000..fda0d1b72ce787
--- /dev/null
+++ b/test/fixtures/wpt/encoding/encodeInto.any.js
@@ -0,0 +1,149 @@
+[
+  {
+    "input": "Hi",
+    "read": 0,
+    "destinationLength": 0,
+    "written": []
+  },
+  {
+    "input": "A",
+    "read": 1,
+    "destinationLength": 10,
+    "written": [0x41]
+  },
+  {
+    "input": "\u{1D306}", // "\uD834\uDF06"
+    "read": 2,
+    "destinationLength": 4,
+    "written": [0xF0, 0x9D, 0x8C, 0x86]
+  },
+  {
+    "input": "\u{1D306}A",
+    "read": 0,
+    "destinationLength": 3,
+    "written": []
+  },
+  {
+    "input": "\uD834A\uDF06A¥Hi",
+    "read": 5,
+    "destinationLength": 10,
+    "written": [0xEF, 0xBF, 0xBD, 0x41, 0xEF, 0xBF, 0xBD, 0x41, 0xC2, 0xA5]
+  },
+  {
+    "input": "A\uDF06",
+    "read": 2,
+    "destinationLength": 4,
+    "written": [0x41, 0xEF, 0xBF, 0xBD]
+  },
+  {
+    "input": "¥¥",
+    "read": 2,
+    "destinationLength": 4,
+    "written": [0xC2, 0xA5, 0xC2, 0xA5]
+  }
+].forEach(testData => {
+  [
+    {
+      "bufferIncrease": 0,
+      "destinationOffset": 0,
+      "filler": 0
+    },
+    {
+      "bufferIncrease": 10,
+      "destinationOffset": 4,
+      "filler": 0
+    },
+    {
+      "bufferIncrease": 0,
+      "destinationOffset": 0,
+      "filler": 0x80
+    },
+    {
+      "bufferIncrease": 10,
+      "destinationOffset": 4,
+      "filler": 0x80
+    },
+    {
+      "bufferIncrease": 0,
+      "destinationOffset": 0,
+      "filler": "random"
+    },
+    {
+      "bufferIncrease": 10,
+      "destinationOffset": 4,
+      "filler": "random"
+    }
+  ].forEach(destinationData => {
+    test(() => {
+      // Setup
+      const bufferLength = testData.destinationLength + destinationData.bufferIncrease,
+            destinationOffset = destinationData.destinationOffset,
+            destinationLength = testData.destinationLength,
+            destinationFiller = destinationData.filler,
+            encoder = new TextEncoder(),
+            buffer = new ArrayBuffer(bufferLength),
+            view = new Uint8Array(buffer, destinationOffset, destinationLength),
+            fullView = new Uint8Array(buffer),
+            control = new Array(bufferLength);
+      let byte = destinationFiller;
+      for (let i = 0; i < bufferLength; i++) {
+        if (destinationFiller === "random") {
+          byte = Math.floor(Math.random() * 256);
+        }
+        control[i] = byte;
+        fullView[i] = byte;
+      }
+
+      // It's happening
+      const result = encoder.encodeInto(testData.input, view);
+
+      // Basics
+      assert_equals(view.byteLength, destinationLength);
+      assert_equals(view.length, destinationLength);
+
+      // Remainder
+      assert_equals(result.read, testData.read);
+      assert_equals(result.written, testData.written.length);
+      for (let i = 0; i < bufferLength; i++) {
+        if (i < destinationOffset || i >= (destinationOffset + testData.written.length)) {
+          assert_equals(fullView[i], control[i]);
+        } else {
+          assert_equals(fullView[i], testData.written[i - destinationOffset]);
+        }
+      }
+    }, "encodeInto() with " + testData.input + " and destination length " + testData.destinationLength + ", offset " + destinationData.destinationOffset + ", filler " + destinationData.filler);
+  });
+});
+
+[DataView,
+ Int8Array,
+ Int16Array,
+ Int32Array,
+ Uint16Array,
+ Uint32Array,
+ Uint8ClampedArray,
+ Float32Array,
+ Float64Array].forEach(view => {
+  test(() => {
+    assert_throws(new TypeError(), () => new TextEncoder().encodeInto("", new view(new ArrayBuffer(0))));
+  }, "Invalid encodeInto() destination: " + view);
+});
+
+test(() => {
+  assert_throws(new TypeError(), () => new TextEncoder().encodeInto("", new ArrayBuffer(10)));
+}, "Invalid encodeInto() destination: ArrayBuffer");
+
+test(() => {
+  const buffer = new ArrayBuffer(10),
+        view = new Uint8Array(buffer);
+  let { read, written } = new TextEncoder().encodeInto("", view);
+  assert_equals(read, 0);
+  assert_equals(written, 0);
+  new MessageChannel().port1.postMessage(buffer, [buffer]);
+  ({ read, written } = new TextEncoder().encodeInto("", view));
+  assert_equals(read, 0);
+  assert_equals(written, 0);
+  ({ read, written } = new TextEncoder().encodeInto("test", view));
+  assert_equals(read, 0);
+  assert_equals(written, 0);
+}, "encodeInto() and a detached output buffer");
diff --git a/test/fixtures/wpt/encoding/streams/decode-bad-chunks.any.js b/test/fixtures/wpt/encoding/streams/decode-bad-chunks.any.js
index 101fb3aeb614cf..de2ba74c1016a1 100644
--- a/test/fixtures/wpt/encoding/streams/decode-bad-chunks.any.js
+++ b/test/fixtures/wpt/encoding/streams/decode-bad-chunks.any.js
@@ -23,26 +23,6 @@ const badChunks = [
     name: 'array',
     value: [65]
   },
-  {
-    name: 'detached ArrayBufferView',
-    value: (() => {
-      const u8 = new Uint8Array([65]);
-      const ab = u8.buffer;
-      const mc = new MessageChannel();
-      mc.port1.postMessage(ab, [ab]);
-      return u8;
-    })()
-  },
-  {
-    name: 'detached ArrayBuffer',
-    value: (() => {
-      const u8 = new Uint8Array([65]);
-      const ab = u8.buffer;
-      const mc = new MessageChannel();
-      mc.port1.postMessage(ab, [ab]);
-      return ab;
-    })()
-  },
   {
     name: 'SharedArrayBuffer',
     // Use a getter to postpone construction so that all tests don't fail where
diff --git a/test/fixtures/wpt/encoding/streams/decode-utf8.any.js b/test/fixtures/wpt/encoding/streams/decode-utf8.any.js
index 34fa764bf0a682..266899c6c965f7 100644
--- a/test/fixtures/wpt/encoding/streams/decode-utf8.any.js
+++ b/test/fixtures/wpt/encoding/streams/decode-utf8.any.js
@@ -39,3 +39,23 @@ promise_test(async () => {
   assert_array_equals(array, [expectedOutputString],
                       'the output should be in one chunk');
 }, 'a trailing empty chunk should be ignored');
+
+promise_test(async () => {
+  const buffer = new ArrayBuffer(3);
+  const view = new Uint8Array(buffer, 1, 1);
+  view[0] = 65;
+  new MessageChannel().port1.postMessage(buffer, [buffer]);
+  const input = readableStreamFromArray([view]);
+  const output = input.pipeThrough(new TextDecoderStream());
+  const array = await readableStreamToArray(output);
+  assert_array_equals(array, [], 'no chunks should be output');
+}, 'decoding a transferred Uint8Array chunk should give no output');
+
+promise_test(async () => {
+  const buffer = new ArrayBuffer(1);
+  new MessageChannel().port1.postMessage(buffer, [buffer]);
+  const input = readableStreamFromArray([buffer]);
+  const output = input.pipeThrough(new TextDecoderStream());
+  const array = await readableStreamToArray(output);
+  assert_array_equals(array, [], 'no chunks should be output');
+}, 'decoding a transferred ArrayBuffer chunk should give no output');
diff --git a/test/fixtures/wpt/encoding/textdecoder-fatal-single-byte.any.js b/test/fixtures/wpt/encoding/textdecoder-fatal-single-byte.any.js
index 9d12134edc58d8..d3e9ae9c9a7774 100644
--- a/test/fixtures/wpt/encoding/textdecoder-fatal-single-byte.any.js
+++ b/test/fixtures/wpt/encoding/textdecoder-fatal-single-byte.any.js
@@ -1,4 +1,6 @@
+// META: timeout=long
 // META: title=Encoding API: Fatal flag for single byte encodings
+// META: timeout=long
 
 var singleByteEncodings = [
   {encoding: 'IBM866', bad: []},
diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json
index 96909391b1ee93..0ca53d9103651f 100644
--- a/test/fixtures/wpt/versions.json
+++ b/test/fixtures/wpt/versions.json
@@ -4,7 +4,7 @@
     "path": "console"
   },
   "encoding": {
-    "commit": "a093a659ed118112138f8a1ffba97a66c1ea8235",
+    "commit": "7287608f90f6b9530635d10086fd2ab386faab38",
     "path": "encoding"
   },
   "url": {

From bea3f049c582a79e0fb004cd7fbc30131a991e77 Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Wed, 22 May 2019 18:45:05 +0200
Subject: [PATCH 4/5] test: expect wpt/encoding/encodeInto.any.js to fail

Since we do not implement TextEncoder.prototype.encodeInto
---
 test/wpt/status/encoding.json | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/test/wpt/status/encoding.json b/test/wpt/status/encoding.json
index 088eed802f0fe7..a81ba605c1e72f 100644
--- a/test/wpt/status/encoding.json
+++ b/test/wpt/status/encoding.json
@@ -50,5 +50,8 @@
   },
   "streams/*.js": {
     "fail": "No implementation of TextDecoderStream and TextEncoderStream"
+  },
+  "encodeInto.any.js": {
+    "fail": "TextEncoder.prototype.encodeInto not implemented"
   }
 }

From e9d79275f43c7558561a58b00083c54bc0629530 Mon Sep 17 00:00:00 2001
From: Joyee Cheung
Date: Mon, 27 May 2019 13:36:15 +0200
Subject: [PATCH 5/5] fixup! test: run WPT in subdirectories

---
 test/common/wpt.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/common/wpt.js b/test/common/wpt.js
index 8a23e9bed5b65f..c227f2e6fa3529 100644
--- a/test/common/wpt.js
+++ b/test/common/wpt.js
@@ -94,7 +94,7 @@ class StatusRule {
    * @returns {RegExp}
    */
   transformPattern(pattern) {
-    const result = pattern.replace(/[-/\\^$+?.()|[\]{}]/g, '\\$&');
+    const result = path.normalize(pattern).replace(/[-/\\^$+?.()|[\]{}]/g, '\\$&');
     return new RegExp(result.replace('*', '.*'));
   }
 }
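
Illustrative sketch (not part of the patch series): the snippet below mirrors the StatusRule#transformPattern() logic from test/common/wpt.js as it stands after the final fixup commit, to show why the "streams/*.js" rule added in PATCH 1/5 matches the nested test paths that the new recursive grep() in PATCH 2/5 discovers. The sample file names are the ones used in the PATCH 2/5 commit message.

'use strict';
const path = require('path');

// Same approach as StatusRule#transformPattern(): normalize the pattern for
// the host platform, escape regexp metacharacters, then widen the single "*"
// wildcard into ".*".
function transformPattern(pattern) {
  const result = path.normalize(pattern).replace(/[-/\\^$+?.()|[\]{}]/g, '\\$&');
  return new RegExp(result.replace('*', '.*'));
}

const rule = transformPattern('streams/*.js');
// StatusLoader#load() builds relative paths with path.relative(), so they use
// the platform separator; normalize the samples the same way before matching.
console.log(rule.test(path.normalize('streams/backpressure.any.js'))); // true
console.log(rule.test(path.normalize('api-basics.any.js'))); // false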