24 changes: 24 additions & 0 deletions inputfiles/patches/authenticator.kdl
@@ -0,0 +1,24 @@
removals {
  enum AuthenticatorTransport {
    smart-card // WebKit only as of 2023-05
  }
  dictionary AuthenticationExtensionsClientInputs {
    // https://searchfox.org/mozilla-central/source/dom/webidl/WebAuthentication.webidl
    // https://searchfox.org/wubkat/source/Source/WebCore/Modules/webauthn/AuthenticationExtensionsClientInputs.idl
    // https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/modules/credentialmanagement/authentication_extensions_client_inputs.idl
    member appidExclude
    member credBlob
    member getCredBlob
    member hmacGetSecret // No implementation as of 2025-05
    member payment
  }
  dictionary AuthenticationExtensionsClientInputsJSON {
    member appidExclude
  }
  dictionary AuthenticationExtensionsClientOutputs {
    // (same as *Inputs)
    member appidExclude // No implementation as of 2025-05
    member hmacGetSecret // No implementation as of 2025-05
    member payment // Blink only as of 2025-06
  }
}
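For orientation, here is a rough, hand-written sketch of how a KDL parser could surface the removals block above to the build script. Only the node fields that src/build/patches.ts below actually reads (name, values, children) are shown; the real Node type comes from the KDL library, so treat this as an illustrative assumption rather than the actual parse output.

// Illustrative approximation of the parsed `removals` node, limited to the
// fields the build script reads (name, values, children).
interface SketchNode {
  name: string; // e.g. "removals", "dictionary", "member"
  values: unknown[]; // positional arguments, e.g. ["AuthenticatorTransport"]
  children: SketchNode[];
}

const removalsNode: SketchNode = {
  name: "removals",
  values: [],
  children: [
    {
      name: "enum",
      values: ["AuthenticatorTransport"],
      children: [{ name: "smart-card", values: [], children: [] }],
    },
    {
      name: "dictionary",
      values: ["AuthenticationExtensionsClientInputsJSON"],
      children: [{ name: "member", values: ["appidExclude"], children: [] }],
    },
    // ...the remaining dictionaries follow the same pattern
  ],
};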
34 changes: 0 additions & 34 deletions inputfiles/removedTypes.jsonc
@@ -13,9 +13,6 @@
  },
  "enums": {
    "enum": {
      "AuthenticatorTransport": {
        "value": ["smart-card"] // WebKit only as of 2023-05
      },
      "ConnectionType": {
        "value": ["wimax"]
      },
@@ -264,37 +261,6 @@
      }
    }
  },
  "AuthenticationExtensionsClientInputs": {
    "members": {
      "member": {
        // https://searchfox.org/mozilla-central/source/dom/webidl/WebAuthentication.webidl
        // https://searchfox.org/wubkat/source/Source/WebCore/Modules/webauthn/AuthenticationExtensionsClientInputs.idl
        // https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/modules/credentialmanagement/authentication_extensions_client_inputs.idl
        "appidExclude": null,
        "credBlob": null,
        "getCredBlob": null,
        "hmacGetSecret": null, // No implementation as of 2025-05
        "payment": null
      }
    }
  },
  "AuthenticationExtensionsClientInputsJSON": {
    "members": {
      "member": {
        "appidExclude": null
      }
    }
  },
  "AuthenticationExtensionsClientOutputs": {
    "members": {
      "member": {
        // (same as *Inputs)
        "appidExclude": null, // No implementation as of 2025-05
        "hmacGetSecret": null, // No implementation as of 2025-05
        "payment": null // Blink only as of 2025-06
      }
    }
  },
  "CanvasRenderingContext2DSettings": {
    "members": {
      "member": {
3 changes: 2 additions & 1 deletion src/build.ts
@@ -89,7 +89,7 @@ async function emitDom() {

  const overriddenItems = await readInputJSON("overridingTypes.jsonc");
  const addedItems = await readInputJSON("addedTypes.jsonc");
  const patches = await readPatches();
  const { patches, removalPatches } = await readPatches();
  const comments = await readInputJSON("comments.json");
  const documentationFromMDN = await generateDescriptions();
  const removedItems = await readInputJSON("removedTypes.jsonc");
@@ -204,6 +204,7 @@ async function emitDom() {
  webidl = merge(webidl, getRemovalData(webidl));
  webidl = merge(webidl, getDocsData(webidl));
  webidl = prune(webidl, removedItems);
  webidl = prune(webidl, removalPatches);
  webidl = merge(webidl, addedItems);
  webidl = merge(webidl, overriddenItems);
  webidl = merge(webidl, patches);
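For context, the removalPatches value pruned here is expected to reproduce the same null-leaf template that the entries deleted from removedTypes.jsonc above used to provide. A minimal sketch of that intended shape, written out by hand rather than taken from the running build:

// Roughly what removalPatches should look like for authenticator.kdl, mirroring
// the removedTypes.jsonc entries deleted above: null removes a whole member,
// while an enum's `value` array removes individual enum values.
const removalPatchesSketch = {
  enums: {
    enum: {
      AuthenticatorTransport: { value: ["smart-card"] },
    },
  },
  dictionaries: {
    dictionary: {
      AuthenticationExtensionsClientInputsJSON: {
        members: { member: { appidExclude: null } },
      },
      // ...the *Inputs and *Outputs dictionaries follow the same pattern
    },
  },
};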
146 changes: 117 additions & 29 deletions src/build/patches.ts
@@ -23,22 +23,38 @@
  signature: DeepPartial<Signature>[] | Record<number, DeepPartial<Signature>>;
}

Contributor: (this should be restored)

function optionalMember<const T>(prop: string, type: T, value?: Value) {
function optionalMember<const T>(
  prop: string,
  type: T,
  value?: Value | DeepPartial<WebIdl>,
) {
  if (value === undefined) {
    return {};
  }
  // Support deep property assignment, e.g. prop = "a.b.c"
  const propPath = prop.split(".");
  if (typeof value !== type) {
    throw new Error(`Expected type ${value} for ${prop}`);
  }
  return {
    [prop]: value as T extends "string"
      ? string
      : T extends "number"
        ? number
        : T extends "boolean"
          ? boolean
          : never,
  };
  // If value is an object, ensure it is not empty (has at least one key)
  if (type === "object" && typeof value === "object" && value !== null) {
    if (Object.keys(value as object).length === 0) {
      return {};
    }
  }

  // Build the nested object dynamically
  let nested: any = value as T extends "string"
    ? string
    : T extends "number"
      ? number
      : T extends "boolean"
        ? boolean
        : never;
  for (let i = propPath.length - 1; i >= 0; i--) {
    nested = { [propPath[i]]: nested };
  }
  return nested;
}
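A quick illustration of the dotted-path behavior introduced above, written as hypothetical calls that mirror the spread call sites further down (the literal argument is illustrative; the real call sites pass the records built in convertKDLNodes):

// Expected behavior of the function above, shown as input/output.
const enumsGroup = { AuthenticatorTransport: {} };
const emptyGroup = {};
const result = {
  ...optionalMember("enums.enum", "object", enumsGroup),
  ...optionalMember("mixins.mixin", "object", emptyGroup),
};
// => { enums: { enum: { AuthenticatorTransport: {} } } }
// The empty record contributes no property at all, so empty groups drop out
// of the spread entirely, just like an undefined value.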

function string(arg: unknown): string {
@@ -77,22 +93,21 @@
}

/**
 * Converts patch files in KDL to match the [types](types.d.ts).
 * Converts parsed KDL Document nodes to match the [types](types.d.ts).
 */
function parseKDL(kdlText: string): DeepPartial<WebIdl> {
  const { output, errors } = parse(kdlText);
function convertKDLNodes(nodes: Node[]): DeepPartial<WebIdl> {
  // Accept either Document or array of nodes
  const actualNodes: Node[] = Array.isArray(nodes)

Check failure on line 100 in src/build/patches.ts (GitHub Actions / test): Replace `⏎····?·nodes⏎···` with `·?·nodes`

    ? nodes
    : nodes;
Contributor: nodes or nodes is just nodes

  if (errors.length) {
    throw new Error("KDL parse errors", { cause: errors });
  }

  const nodes = output!;
  const enums: Record<string, Enum> = {};
  const mixin: Record<string, DeepPartial<Interface>> = {};
  const interfaces: Record<string, DeepPartial<Interface>> = {};
  const dictionary: Record<string, DeepPartial<Dictionary>> = {};

  for (const node of nodes) {
  for (const node of actualNodes) {
    // Note: no "removals" handling here; caller is responsible for splitting
    const name = string(node.values[0]);
    switch (node.name) {
      case "enum":
@@ -113,10 +128,12 @@
  }

  return {
    enums: { enum: enums },
    mixins: { mixin },
    interfaces: { interface: interfaces },
    dictionaries: { dictionary },
    ...optionalMember("enums.enum", "object", enums),
Contributor: Do we need this? Why?
Contributor: I still don't understand why this change is needed

    ...optionalMember("mixins.mixin", "object", mixin),
Contributor: Same for this and below, and generally the dot accessor thing as a whole.

    ...optionalMember("interfaces.interface", "object", interfaces),
    dictionaries: {
      dictionary

Check failure on line 135 in src/build/patches.ts (GitHub Actions / test): Insert `,`

    }

Check failure on line 136 in src/build/patches.ts (GitHub Actions / test): Insert `,`

  };
}

@@ -368,21 +385,92 @@
}

/**
 * Read and parse a single KDL file.
 * Read and parse a single KDL file into its KDL Document structure.
 */
export async function readPatch(fileUrl: URL): Promise<any> {
async function readPatchDocument(fileUrl: URL): Promise<Node[]> {
  const text = await readFile(fileUrl, "utf8");
  return parseKDL(text);
  const { output, errors } = parse(text);
  if (errors.length) {
    throw new Error(`KDL parse errors in ${fileUrl.toString()}`, {
      cause: errors,
    });
  }
  return output!;
}
/**
 * Recursively remove all 'name' fields from the object and its children, and
 * replace any empty objects ({} or []) with null.
 */
function sanitizeRemovals(obj: unknown): unknown {
@saschanaz (Contributor, Dec 1, 2025): convertForRemovals? It's not really sanitizing - normalizing random user input, but rather converting into a different format.

  if (Array.isArray(obj)) {
    const result = obj.map(sanitizeRemovals).filter((v) => v !== undefined);
    return result.length === 0 ? null : result;
  }
  if (obj && typeof obj === "object") {
    const newObj: { [key: string]: unknown } = {};
Contributor: Can use Record here

    for (const [key, value] of Object.entries(obj as Record<string, unknown>)) {
      if (key !== "name") {
        const cleaned = sanitizeRemovals(value);
        if (cleaned !== undefined) {
          newObj[key] = cleaned;
        }
      }
    }
    // Replace empty objects with null
    return Object.keys(newObj).length === 0 ? null : newObj;
  }
  return obj;
}
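To make the doc comment above concrete, a hypothetical before/after pair; the exact pre-conversion member shape (an object carrying only its `name`) is an assumption, while the name-stripping and empty-to-null behavior follow the code above:

// Hypothetical input: members that only carry their `name` field.
const before = {
  members: {
    member: {
      appidExclude: { name: "appidExclude" },
      hmacGetSecret: { name: "hmacGetSecret" },
    },
  },
};
sanitizeRemovals(before);
// => { members: { member: { appidExclude: null, hmacGetSecret: null } } }
// `name` is dropped and the now-empty objects collapse to null, which is the
// convention removedTypes.jsonc used for whole-member removals.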

/**
 * Read, parse, and merge all KDL files under the input folder.
 * Splits the main patch content and the removals from each file for combined processing.
 *
 * Returns:
 * {
 *   patches: merged patch contents (excluding removals),
 *   removalPatches: merged removals, with names stripped
 * }
 */
export default async function readPatches(): Promise<any> {
export default async function readPatches(): Promise<{
  patches: any;
  removalPatches: any;
}> {
  const patchDirectory = new URL("../../inputfiles/patches/", import.meta.url);
  const fileUrls = await getAllFileURLs(patchDirectory);

  const parsedContents = await Promise.all(fileUrls.map(readPatch));
  // Stage 1: Parse all file KDLs into Documents
  const documents = await Promise.all(fileUrls.map(readPatchDocument));

  // Stage 2: For each document, split main nodes and removals nodes
  const patchNodeGroups: Node[][] = [];
  const removalsNodeGroups: Node[][] = [];

  for (const doc of documents) {
    const mainNodes: Node[] = [];
    let localRemovalsNodes: Node[] = [];
    for (const node of doc) {
      if (node.name === "removals") {
        // Each removals node may itself contain multiple root nodes
        localRemovalsNodes = localRemovalsNodes.concat(node.children);
      } else {
        mainNodes.push(node);
      }
    }
    patchNodeGroups.push(mainNodes);
    if (localRemovalsNodes.length > 0) {
      removalsNodeGroups.push(localRemovalsNodes);
    }
  }

  // Stage 3: Merge all main patches and removals separately using convertKDLNodes
  const patchObjs = patchNodeGroups.map((nodes) => convertKDLNodes(nodes));
  const removalObjs = removalsNodeGroups.map((nodes) => convertKDLNodes(nodes));

  const patches = patchObjs.reduce((acc, cur) => merge(acc, cur), {});
  const removalPatches = sanitizeRemovals(
    removalObjs.reduce((acc, cur) => merge(acc, cur), {}),
  );

  return parsedContents.reduce((acc, current) => merge(acc, current), {});
  return { patches, removalPatches };
}