Fix: parse() throws an error on valid ReflectOnly Blink extended attributes #443

Closed · wants to merge 4 commits
Changes from 1 commit
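
For context, a minimal sketch of the kind of input this PR is meant to accept. It assumes the public parse() entry point documented in the README; the members/extAttrs access at the end is an assumed AST shape, not something shown in this diff. Blink's ReflectOnly extended attribute takes a parenthesised list of string values, which the stock identifier-list grammar rejected.

// Sketch: Blink-style IDL with a string list as an extended attribute's
// right-hand side. Standard Web IDL only allows identifier lists here, which
// is why parse() previously threw on input like this.
const { parse } = require("webidl2");

const idl = `
  interface HTMLImageElement {
    [ReflectOnly=("anonymous", "use-credentials")] attribute DOMString crossOrigin;
  };
`;

// With the identifiersOrStrings helper introduced in this PR, this call is
// expected to succeed instead of throwing a syntax error.
const ast = parse(idl);
console.log(ast[0].members[0].extAttrs[0].name); // "ReflectOnly" (assumed AST shape)
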
35 changes: 34 additions & 1 deletion lib/productions/extended-attributes.js
@@ -1,8 +1,41 @@
import { Base } from "./base.js";
import { ArrayBase } from "./array-base.js";
import { list, identifiersOrStrings, argument_list, autoParenter } from "./helpers.js";
import { Token } from "./token.js";
import { list, argument_list, autoParenter } from "./helpers.js";
import { validationError } from "../error.js";

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
*/
function identifiers(tokeniser) {
Review comment (Member): Oops, forgot that we are replacing identifiers with identifiersOrStrings. That means it's safe to remove this!

  const ids = list(tokeniser, { parser: Token.parser(tokeniser, "identifier"), listName: "identifier list" });
  if (!ids.length) {
    tokeniser.error("Expected identifiers but none found");
  }
  return ids;
}
/**
*
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {string} tokenName
*/
function tokens(tokeniser, tokenName) {
  const toks = list(tokeniser, {
    parser: Token.parser(tokeniser, tokenName),
    listName: tokenName + " list"
  });
  if (!toks.length) {
    tokeniser.error(`Expected ${tokenName}s but none found`);
  }
  return toks;
}

// This will allow a set of strings to be parsed
function identifiersOrStrings(tokeniser) {
  return tokens(tokeniser, "identifier") || tokens(tokeniser, "string");
}


class ExtendedAttributeParameters extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
(remaining lines not shown)
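
For orientation, a conceptual sketch of the contract the new tokens()/identifiersOrStrings() helpers rely on. This is not webidl2's actual list() implementation (only its signature appears in this diff), and listSketch is a hypothetical name; it just restates the behaviour the wrappers assume: apply the element parser repeatedly, consuming separating commas, and stop when the parser no longer matches.

// Conceptual sketch only, not the library's real code: the shape of the list()
// combinator that tokens() wraps with Token.parser(tokeniser, tokenName).
function listSketch(tokeniser, { parser, listName = "list" }) {
  const items = [];
  let item = parser(tokeniser);          // try the first element
  while (item) {
    items.push(item);
    if (!tokeniser.consume(",")) break;  // no separating comma: the list ends
    item = parser(tokeniser);            // a comma promises another element
    if (!item) tokeniser.error(`Trailing comma in ${listName}`);
  }
  return items;
}

// Under that contract, tokens(tokeniser, "string") yields one Token per quoted
// value, which is what lets a ReflectOnly=("anonymous", "use-credentials")
// right-hand side be consumed as a string list.
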
38 changes: 1 addition & 37 deletions lib/productions/helpers.js
@@ -1,6 +1,5 @@
import { Type } from "./type.js";
import { Argument } from "./argument.js";
import { Token } from "./token.js";
import { ExtendedAttributes, SimpleExtendedAttribute } from "./extended-attributes.js";
import { Operation } from "./operation.js";
import { Attribute } from "./attribute.js";
@@ -43,9 +42,7 @@ export function list(tokeniser, { parser, allowDangler, listName = "list" }) {
  return items;
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
*/
Review comment (Member): Oops, this one is still needed!


export function const_value(tokeniser) {
  return tokeniser.consume("true", "false", "Infinity", "-Infinity", "NaN", "decimal", "integer");
}
@@ -109,17 +106,6 @@ export function primitive_type(tokeniser) {
}
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
*/
export function identifiers(tokeniser) {
  const ids = list(tokeniser, { parser: Token.parser(tokeniser, "identifier"), listName: "identifier list" });
  if (!ids.length) {
    tokeniser.error("Expected identifiers but none found");
  }
  return ids;
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
*/
@@ -289,25 +275,3 @@ export function autoParenter(data, parent) {
}
});
}

/**
* Returns a proxy that auto-assign `parent` field.
* @template T
* @param {T} tokeniser
* @param {*} [tokenName] either can be a string or identifier
* @return {T}
*/
function tokens(tokeniser, tokenName) {
  const toks = list(tokeniser, {
    parser: Token.parser(tokeniser, tokenName),
    listName: tokenName + " list"
  });
  if (!toks.length) {
    tokeniser.error(`Expected ${tokenName}s but none found`);
  }
  return toks;
}
// This will allow a set of strings to be parsed
export function identifiersOrStrings(tokeniser) {
  return tokens(tokeniser, "identifier") || tokens(tokeniser, "string");
}