5 changes: 5 additions & 0 deletions test/fixtures/wpt/common/get-host-info.sub.js
@@ -20,16 +20,20 @@ function get_host_info() {
var REMOTE_HOST = (ORIGINAL_HOST === 'localhost') ? '127.0.0.1' : ('www1.' + ORIGINAL_HOST);
var OTHER_HOST = '{{domains[www2]}}';
var NOTSAMESITE_HOST = (ORIGINAL_HOST === 'localhost') ? '127.0.0.1' : ('{{hosts[alt][]}}');
var OTHER_NOTSAMESITE_HOST = '{{hosts[alt][www2]}}';

return {
HTTP_PORT: HTTP_PORT,
HTTP_PORT2: HTTP_PORT2,
HTTPS_PORT: HTTPS_PORT,
HTTPS_PORT2: HTTPS_PORT2,
HTTP_PORT_ELIDED: HTTP_PORT_ELIDED,
HTTPS_PORT_ELIDED: HTTPS_PORT_ELIDED,
PORT: PORT,
PORT2: PORT2,
ORIGINAL_HOST: ORIGINAL_HOST,
REMOTE_HOST: REMOTE_HOST,
NOTSAMESITE_HOST,

ORIGIN: PROTOCOL + "//" + ORIGINAL_HOST + PORT_ELIDED,
HTTP_ORIGIN: 'http://' + ORIGINAL_HOST + HTTP_PORT_ELIDED,
@@ -44,6 +48,7 @@ function get_host_info() {
HTTPS_REMOTE_ORIGIN: 'https://' + REMOTE_HOST + HTTPS_PORT_ELIDED,
HTTPS_REMOTE_ORIGIN_WITH_CREDS: 'https://foo:bar@' + REMOTE_HOST + HTTPS_PORT_ELIDED,
HTTPS_NOTSAMESITE_ORIGIN: 'https://' + NOTSAMESITE_HOST + HTTPS_PORT_ELIDED,
HTTPS_OTHER_NOTSAMESITE_ORIGIN: 'https://' + OTHER_NOTSAMESITE_HOST + HTTPS_PORT_ELIDED,
UNAUTHENTICATED_ORIGIN: 'http://' + OTHER_HOST + HTTP_PORT_ELIDED,
AUTHENTICATED_ORIGIN: 'https://' + OTHER_HOST + HTTPS_PORT_ELIDED
};
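
A minimal sketch, not from this PR, of how a test might consume the new entry (the target path is a placeholder):

// Illustrative only: the new OTHER_NOTSAMESITE_HOST feeds
// HTTPS_OTHER_NOTSAMESITE_ORIGIN, a second origin that is not
// same-site with the original host, distinct from HTTPS_NOTSAMESITE_ORIGIN.
const host_info = get_host_info();
const url = host_info.HTTPS_OTHER_NOTSAMESITE_ORIGIN + '/common/blank.html';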
@@ -166,7 +166,7 @@ function setAttributes(el, attrs) {
attrs = attrs || {}
for (var attr in attrs) {
if (attr !== 'src')
el.setAttribute(attr, attrs[attr]);
el.setAttribute(attr.toLowerCase(), attrs[attr]);
}
// Workaround for Chromium: set <img>'s src attribute after all other
// attributes to ensure the policy is applied.
@@ -826,6 +826,54 @@ function requestViaWebSocket(url) {
});
}

/**
 * Creates an SVG anchor element and the corresponding SVG setup, appends the
 * setup to {@code document.body}, and performs the navigation.
 * @param {string} url The URL to navigate to.
 * @param {Object=} additionalAttributes Optional attributes to set on the anchor.
 * @return {Promise} The promise for success/error events.
*/
function requestViaSVGAnchor(url, additionalAttributes) {
const name = guid();

const iframe =
createElement("iframe", { "name": name, "id": name }, document.body, false);

// Create SVG container
const svg = document.createElementNS("http://www.w3.org/2000/svg", "svg");

// Create SVG anchor element
const svgAnchor = document.createElementNS("http://www.w3.org/2000/svg", "a");
const link_attributes = Object.assign({ "href": url, "target": name }, additionalAttributes);
setAttributes(svgAnchor, link_attributes);

// Add some text content for the anchor
const text = document.createElementNS("http://www.w3.org/2000/svg", "text");
text.setAttribute("y", "50");
text.textContent = "SVG Link to resource";

svgAnchor.appendChild(text);
svg.appendChild(svgAnchor);
document.body.appendChild(svg);

const promise =
bindEvents2(window, "message", iframe, "error", window, "error")
.then(event => {
if (event.source !== iframe.contentWindow)
return Promise.reject(new Error('Unexpected event.source'));
return event.data;
});

// Simulate a click event on the SVG anchor
const event = new MouseEvent('click', {
view: window,
bubbles: true,
cancelable: true
});
svgAnchor.dispatchEvent(event);

return promise;
}

/**
@typedef SubresourceType
@type {string}
@@ -892,6 +940,10 @@ const subresourceMap = {
path: "/common/security-features/subresource/script.py",
invoker: requestViaDynamicImport,
},
"svg-a-tag": {
path: "/common/security-features/subresource/document.py",
invoker: requestViaSVGAnchor,
},
"video-tag": {
path: "/common/security-features/subresource/video.py",
invoker: requestViaVideo,
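
A hypothetical sketch of how the new "svg-a-tag" entry is reached through the map (the harness wiring that resolves `path` to a full URL is assumed, not shown in this diff):

// Sketch only: invoke the new subresource type directly.
const { path, invoker } = subresourceMap["svg-a-tag"];
invoker(path).then(data => {
  // Resolves with the message posted back from the named iframe once
  // the SVG <a> navigation in requestViaSVGAnchor() completes.
});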
@@ -111,6 +111,7 @@
"sharedworker-import",
"sharedworker-import-data",
"sharedworker-module",
"svg-a-tag",
"video-tag",
"worker-classic",
"worker-import",
@@ -513,6 +514,7 @@
"sharedworker-import",
"sharedworker-import-data",
"sharedworker-module",
"svg-a-tag",
"video-tag",
"websocket",
"worker-classic",
@@ -113,7 +113,7 @@ def validate(spec_json, details):
valid_subresource_names = [
"a-tag", "area-tag", "audio-tag", "form-tag", "iframe-tag", "img-tag",
"link-css-tag", "link-prefetch-tag", "object-tag", "picture-tag",
"script-tag", "script-tag-dynamic-import", "video-tag"
"script-tag", "script-tag-dynamic-import", "svg-a-tag", "video-tag"
] + ["beacon", "fetch", "xhr", "websocket"] + [
"worker-classic", "worker-module", "worker-import",
"worker-import-data", "sharedworker-classic", "sharedworker-module",
3 changes: 3 additions & 0 deletions test/fixtures/wpt/eventsource/WEB_FEATURES.yml
@@ -0,0 +1,3 @@
features:
- name: server-sent-events
files: "**"
3 changes: 3 additions & 0 deletions test/fixtures/wpt/fetch/api/abort/WEB_FEATURES.yml
@@ -0,0 +1,3 @@
features:
- name: abortable-fetch
files: "**"
4 changes: 4 additions & 0 deletions test/fixtures/wpt/fetch/api/basic/WEB_FEATURES.yml
@@ -0,0 +1,4 @@
features:
- name: fetch-request-streams
files:
- request-upload*
2 changes: 1 addition & 1 deletion test/fixtures/wpt/fetch/api/body/mime-type.any.js
@@ -87,7 +87,7 @@

[
() => new Request("about:blank", { method: "POST", body: new Blob([""], { type: "Text/Plain" }), headers: [["Content-Type", "Text/Html"]] }),
() => new Response(new Blob([""], { type: "Text/Plain" }, { headers: [["Content-Type", "Text/Html"]] }))
() => new Response(new Blob([""], { type: "Text/Plain" }), { headers: [["Content-Type", "Text/Html"]] })
].forEach(bodyContainerCreator => {
const bodyContainer = bodyContainerCreator();
const cloned = bodyContainer.clone();
4 changes: 4 additions & 0 deletions test/fixtures/wpt/fetch/api/request/WEB_FEATURES.yml
@@ -0,0 +1,4 @@
features:
- name: fetch-priority
files:
- request-init-priority.any.js
@@ -3,6 +3,7 @@
// list of bad ports according to
// https://fetch.spec.whatwg.org/#port-blocking
var BLOCKED_PORTS_LIST = [
0,
1, // tcpmux
7, // echo
9, // discard
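
A hedged sketch of the behavior this list drives (assuming a spec-conformant fetch implementation; the URL is a placeholder):

// Port 0 is now on the blocked list, so the request should fail with a
// network error (a TypeError) before any connection is attempted.
fetch('http://example.com:0/').then(
  () => { throw new Error('unexpectedly succeeded'); },
  (e) => { /* expected: TypeError due to bad-port blocking */ });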
2 changes: 1 addition & 1 deletion test/fixtures/wpt/fetch/api/resources/keepalive-helper.js
@@ -117,7 +117,7 @@ function assertStashedTokenAsync(
*
* `unloadIframe` to unload the iframe before verifying stashed token to
* simulate the situation that unloads after fetching. Note that this test is
* different from `keepaliveRedirectInUnloadTest()` in that the the latter
* different from `keepaliveRedirectInUnloadTest()` in that the latter
* performs fetch() call directly in `unload` event handler, while this test
* does it in `load`.
*/
4 changes: 2 additions & 2 deletions test/fixtures/wpt/fetch/api/response/response-clone.any.js
@@ -38,7 +38,7 @@ test(function() {

promise_test(function(test) {
return validateStreamFromString(response.body.getReader(), body);
}, "Check orginal response's body after cloning");
}, "Check original response's body after cloning");

promise_test(function(test) {
return validateStreamFromString(clonedResponse.body.getReader(), body);
@@ -104,7 +104,7 @@ function testReadableStreamClone(initialBuffer, bufferType)
}).then(function(data) {
assert_false(data.done);
if (initialBuffer instanceof ArrayBuffer) {
assert_true(data.value instanceof ArrayBuffer, "Cloned buffer is ArrayBufer");
assert_true(data.value instanceof ArrayBuffer, "Cloned buffer is ArrayBuffer");
assert_equals(initialBuffer.byteLength, data.value.byteLength, "Length equal");
assert_array_equals(new Uint8Array(data.value), new Uint8Array(initialBuffer), "Cloned buffer chunks have the same content");
} else if (initialBuffer instanceof DataView) {
3 changes: 3 additions & 0 deletions test/fixtures/wpt/fetch/compression-dictionary/README.md
@@ -0,0 +1,3 @@
These are the tests for the [Compression Dictionary Transport](https://datatracker.ietf.org/doc/draft-ietf-httpbis-compression-dictionary/) standard (currently in IETF draft state, approved for publication). The tests are marked as tentative, pending the publication of the RFC.

The MDN reference is [here](https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/Compression_dictionary_transport).
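
For orientation, a sketch of the header exchange these tests exercise (header names are from the draft; the values shown are illustrative):

// 1. A response offers itself as a dictionary for matching requests:
//      Use-As-Dictionary: match="/resources/*"
// 2. Later matching requests advertise the stored dictionary's hash:
//      Available-Dictionary: :pZGm1A...=:   (base64 SHA-256, truncated here)
// 3. The server may then send dictionary-compressed content:
//      Content-Encoding: dcb   (Brotli) or dcz (Zstandard)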
@@ -4,7 +4,7 @@
<meta name="timeout" content="long"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="./resources/compression-dictionary-util.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>
@@ -4,7 +4,7 @@
<meta name="timeout" content="long"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="./resources/compression-dictionary-util.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>
@@ -4,7 +4,7 @@
<meta name="timeout" content="long"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="./resources/compression-dictionary-util.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>
@@ -0,0 +1,105 @@
<!DOCTYPE html>
<head>
<meta charset="utf-8">
<meta name="timeout" content="long"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/get-host-info.sub.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>

// This is a set of tests for the dictionary itself being compressed, both by
// non-dictionary content encodings and dictionary encodings. The encoding used
// for the dictionary itself is independent of the encoding used for the data
// so the test uses different encodings just to make sure that the dictionaries
// don't carry any encoding-specific dependencies.

compression_dictionary_promise_test(async (t) => {
const dictionaryUrl =
`${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=gzip`;
const dict = await (await fetch(dictionaryUrl)).text();
assert_equals(dict, kDefaultDictionaryContent);
const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);

// Check if the data compressed using the dictionary can be decompressed.
const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
const data = await (await fetch(data_url)).text();
assert_equals(data, kExpectedCompressedData);
}, 'Decompression using gzip-encoded dictionary works as expected');

compression_dictionary_promise_test(async (t) => {
const dictionaryUrl =
`${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=br`;
const dict = await (await fetch(dictionaryUrl)).text();
assert_equals(dict, kDefaultDictionaryContent);
const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);

// Check if the data compressed using the dictionary can be decompressed.
const data_url = `${kCompressedDataPath}?content_encoding=dcz`;
const data = await (await fetch(data_url)).text();
assert_equals(data, kExpectedCompressedData);
}, 'Decompression using Brotli-encoded dictionary works as expected');

compression_dictionary_promise_test(async (t) => {
const dictionaryUrl =
`${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=zstd`;
const dict = await (await fetch(dictionaryUrl)).text();
assert_equals(dict, kDefaultDictionaryContent);
const dictionary_hash = await waitUntilAvailableDictionaryHeader(t, {});
assert_equals(dictionary_hash, kDefaultDictionaryHashBase64);

// Check if the data compressed using Brotli with the dictionary can be
// decompressed (Zstandard decompression of the data is tested separately).
const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
const data = await (await fetch(data_url)).text();
assert_equals(data, kExpectedCompressedData);
}, 'Decompression using Zstandard-encoded dictionary works as expected');

compression_dictionary_promise_test(async (t) => {
const dictionaryUrl = `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?id=id1`;
const dict = await (await fetch(dictionaryUrl)).text();
assert_equals(dict, kDefaultDictionaryContent);
assert_equals(
await waitUntilAvailableDictionaryHeader(t, {}),
kDefaultDictionaryHashBase64);

// Register another dictionary, compressed with dcb using the first dictionary.
const compressedDictionaryUrl =
`${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=dcb&id=id2`;
const dict2 = await (await fetch(compressedDictionaryUrl)).text();
assert_equals(dict2, kDefaultDictionaryContent);
await waitUntilHeader(t, "dictionary-id", {expected_header: '"id2"'});

// Check if the data compressed using dcz with the updated dictionary works.
const data_url = `${SAME_ORIGIN_RESOURCES_URL}/compressed-data.py?content_encoding=dcz`;
const data = await (await fetch(data_url)).text();
assert_equals(data, kExpectedCompressedData);
}, 'A dcb dictionary-compressed dictionary can be used as a dictionary for future requests.');

compression_dictionary_promise_test(async (t) => {
const dictionaryUrl = `${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?id=id1`;
const dict = await (await fetch(dictionaryUrl)).text();
assert_equals(dict, kDefaultDictionaryContent);
assert_equals(
await waitUntilAvailableDictionaryHeader(t, {}),
kDefaultDictionaryHashBase64);

// Register another dictionary, compressed with dcz using the first dictionary.
const compressedDictionaryUrl =
`${SAME_ORIGIN_RESOURCES_URL}/register-dictionary.py?content_encoding=dcz&id=id2`;
const dict2 = await (await fetch(compressedDictionaryUrl)).text();
assert_equals(dict2, kDefaultDictionaryContent);
await waitUntilHeader(t, "dictionary-id", {expected_header: '"id2"'});

// Check if the data compressed using dcb with the updated dictionary works.
const data_url = `${SAME_ORIGIN_RESOURCES_URL}/compressed-data.py?content_encoding=dcb`;
const data = await (await fetch(data_url)).text();
assert_equals(data, kExpectedCompressedData);
}, 'A dcz dictionary-compressed dictionary can be used as a dictionary for future requests.');

</script>
</body>
@@ -5,7 +5,7 @@
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/get-host-info.sub.js"></script>
<script src="./resources/compression-dictionary-util.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>
@@ -0,0 +1,42 @@
<!DOCTYPE html>
<head>
<meta charset="utf-8">
<meta name="timeout" content="long"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/get-host-info.sub.js"></script>
<script src="/common/utils.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>

function getHeadersCrossorigin() {
function headersCallback(r) {
return (x) => {
r(x);
}
}
let script = document.createElement("script");
return new Promise((resolve, reject) => {
getHeadersCrossorigin['callback'] = headersCallback(resolve);
script.src =
`${CROSS_ORIGIN_RESOURCES_URL}/echo-headers.py?callback=getHeadersCrossorigin.callback`;
document.head.appendChild(script);
});
}

compression_dictionary_promise_test(async (t) => {
// Register the dictionary
const dict = await (await fetch(kRegisterDictionaryPath)).text();
assert_equals(dict, kDefaultDictionaryContent);
assert_equals(
await waitUntilAvailableDictionaryHeader(t, {}),
kDefaultDictionaryHashBase64);
// Test a no-cors crossorigin fetch
const headers = await getHeadersCrossorigin();
assert_false("available-dictionary" in headers);
}, 'Fetch cross-origin no-cors request does not include Available-Dictionary header');

</script>
</body>
@@ -6,7 +6,7 @@
<script src="/resources/testharnessreport.js"></script>
<script src="/common/get-host-info.sub.js"></script>
<script src="/common/utils.js"></script>
<script src="./resources/compression-dictionary-util.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>
@@ -6,7 +6,7 @@
<script src="/resources/testharnessreport.js"></script>
<script src="/common/get-host-info.sub.js"></script>
<script src="/common/utils.js"></script>
<script src="./resources/compression-dictionary-util.js"></script>
<script src="./resources/compression-dictionary-util.sub.js"></script>
</head>
<body>
<script>