Store token type as number (#13768)

* refactor: abstract token metadata access

* refactor: move token-specific update context logic

* refactor: centralize token definitions

* refactor: abstract token type creation

* refactor: use number as token storage

* build: replace tt.* with numbers

* fix flow errors

* fix: build on Node 12

* Update packages/babel-parser/src/tokenizer/types.js

Co-authored-by: Nicolò Ribaudo <nicolo.ribaudo@gmail.com>

* refactor: rename token types exports to tt

* update unit test

* test: update Babel 8 test fixtures

* fix: centralize obsolete token type updateContext

* fix flow errors

Co-authored-by: Nicolò Ribaudo <nicolo.ribaudo@gmail.com>
Huáng Jùnliàng
2021-09-17 09:36:11 -04:00
committed by GitHub
parent eec8372b56
commit d2076a531f
48 changed files with 1408 additions and 329 deletions
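
The core of the change: previously every entry on tt was a TokenType class instance, and callers read metadata straight off the object (type.label, type.keyword, type.beforeExpr, type.binop). After this PR a token type is a plain number, and metadata is read through exported helper functions. A minimal sketch of the scheme, not the exact Babel internals; the counter and array names here are illustrative assumptions:

// Each createToken call hands out the next integer and records the token's
// metadata in parallel arrays indexed by that integer.
const tokenLabels = [];
const tokenBeforeExprs = [];
let tokenTypeCounter = -1;

function createToken(label, options = {}) {
  tokenLabels.push(label);
  tokenBeforeExprs.push(!!options.beforeExpr);
  return ++tokenTypeCounter;
}

// Token types are now just numbers...
export const tt = {
  jsxName: createToken("jsxName"),
  jsxText: createToken("jsxText", { beforeExpr: true }),
};

// ...and metadata reads become array lookups instead of property access.
export function tokenLabelName(token) {
  return tokenLabels[token];
}
export function tokenComesBeforeExpression(token) {
  return tokenBeforeExprs[token];
}

A side effect is that hot-path comparisons and switch dispatch now work on integers rather than object identities.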

View File

@@ -1,6 +1,6 @@
 // @flow

-import { TokenType } from "../tokenizer/types";
+import { type TokenType } from "../tokenizer/types";
 import type Parser from "../parser";
 import type { ExpressionErrors } from "../parser/util";
 import * as N from "../types";

View File

@@ -6,7 +6,12 @@
 /* eslint-disable @babel/development-internal/dry-error-messages */

 import type Parser from "../../parser";
-import { types as tt, type TokenType } from "../../tokenizer/types";
+import {
+  tokenIsKeyword,
+  tokenLabelName,
+  tt,
+  type TokenType,
+} from "../../tokenizer/types";
 import * as N from "../../types";
 import type { Position } from "../../util/location";
 import { types as tc } from "../../tokenizer/context";
@@ -157,7 +162,8 @@ function hasTypeImportKind(node: N.Node): boolean {

 function isMaybeDefaultImport(state: { type: TokenType, value: any }): boolean {
   return (
-    (state.type === tt.name || !!state.type.keyword) && state.value !== "from"
+    (state.type === tt.name || tokenIsKeyword(state.type)) &&
+    state.value !== "from"
   );
 }
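
With token types as numbers, a predicate like tokenIsKeyword (replacing the old !!state.type.keyword check) can be a constant-time range test instead of a property read. One plausible shape, assuming keyword tokens are allocated contiguously; the bounds below are hypothetical, not the parser's actual layout:

// Sketch: if keyword tokens are created back-to-back, "is this a keyword?"
// reduces to a numeric range test.
const tokenFirstKeyword = tt._in;   // hypothetical lower bound
const tokenLastKeyword = tt._while; // hypothetical upper bound

export function tokenIsKeyword(token) {
  return token >= tokenFirstKeyword && token <= tokenLastKeyword;
}
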
@@ -1605,11 +1611,12 @@ export default (superClass: Class<Parser>): Class<Parser> =>
           this.next();
           return this.finishNode(node, "ExistsTypeAnnotation");

+        case tt._typeof:
+          return this.flowParseTypeofType();
+
         default:
-          if (this.state.type.keyword === "typeof") {
-            return this.flowParseTypeofType();
-          } else if (this.state.type.keyword) {
-            const label = this.state.type.label;
+          if (tokenIsKeyword(this.state.type)) {
+            const label = tokenLabelName(this.state.type);
             this.next();
             return super.createIdentifier(node, label);
           }
@@ -2650,7 +2657,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
         if (
           specifierTypeKind !== null &&
           !this.match(tt.name) &&
-          !this.state.type.keyword
+          !tokenIsKeyword(this.state.type)
         ) {
           // `import {type as ,` or `import {type as }`
           specifier.imported = as_ident;
@@ -2665,7 +2672,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
       } else {
         if (
           specifierTypeKind !== null &&
-          (this.match(tt.name) || this.state.type.keyword)
+          (this.match(tt.name) || tokenIsKeyword(this.state.type))
         ) {
           // `import {type foo`
           specifier.imported = this.parseIdentifier(true);

View File

@@ -8,7 +8,13 @@ import * as charCodes from "charcodes";
 import XHTMLEntities from "./xhtml";
 import type Parser from "../../parser";
 import type { ExpressionErrors } from "../../parser/util";
-import { TokenType, types as tt } from "../../tokenizer/types";
+import {
+  tokenComesBeforeExpression,
+  tokenIsKeyword,
+  tokenLabelName,
+  type TokenType,
+  tt,
+} from "../../tokenizer/types";
 import { TokContext, types as tc } from "../../tokenizer/context";
 import * as N from "../../types";
 import { isIdentifierChar, isIdentifierStart } from "../../util/identifier";
@@ -45,23 +51,11 @@ const JsxErrors = makeErrorTemplates(
 /* eslint-disable sort-keys */

 // Be aware that this file is always executed and not only when the plugin is enabled.
-// Therefore this contexts and tokens do always exist.
+// Therefore the contexts do always exist.
 tc.j_oTag = new TokContext("<tag");
 tc.j_cTag = new TokContext("</tag");
 tc.j_expr = new TokContext("<tag>...</tag>", true);

-tt.jsxName = new TokenType("jsxName");
-tt.jsxText = new TokenType("jsxText", { beforeExpr: true });
-tt.jsxTagStart = new TokenType("jsxTagStart", { startsExpr: true });
-tt.jsxTagEnd = new TokenType("jsxTagEnd");
-
-tt.jsxTagStart.updateContext = context => {
-  context.push(
-    tc.j_expr, // treat as beginning of JSX expression
-    tc.j_oTag, // start opening tag context
-  );
-};
-
 function isFragment(object: ?N.JSXElement): boolean {
   return object
     ? object.type === "JSXOpeningFragment" ||
@@ -259,8 +253,8 @@ export default (superClass: Class<Parser>): Class<Parser> =>
       const node = this.startNode();
       if (this.match(tt.jsxName)) {
         node.name = this.state.value;
-      } else if (this.state.type.keyword) {
-        node.name = this.state.type.keyword;
+      } else if (tokenIsKeyword(this.state.type)) {
+        node.name = tokenLabelName(this.state.type);
       } else {
         this.unexpected();
       }
@@ -624,6 +618,11 @@ export default (superClass: Class<Parser>): Class<Parser> =>
         // reconsider as closing tag context
         context.splice(-2, 2, tc.j_cTag);
         this.state.exprAllowed = false;
+      } else if (type === tt.jsxTagStart) {
+        context.push(
+          tc.j_expr, // treat as beginning of JSX expression
+          tc.j_oTag, // start opening tag context
+        );
       } else if (type === tt.jsxTagEnd) {
         const out = context.pop();
         if ((out === tc.j_oTag && prevType === tt.slash) || out === tc.j_cTag) {
@@ -633,12 +632,12 @@ export default (superClass: Class<Parser>): Class<Parser> =>
           this.state.exprAllowed = true;
         }
       } else if (
-        type.keyword &&
+        tokenIsKeyword(type) &&
         (prevType === tt.dot || prevType === tt.questionDot)
       ) {
         this.state.exprAllowed = false;
       } else {
-        this.state.exprAllowed = type.beforeExpr;
+        this.state.exprAllowed = tokenComesBeforeExpression(type);
       }
     }
   };
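
A plain number cannot carry the updateContext callback that a TokenType instance could, so the per-token tt.jsxTagStart.updateContext assignment removed earlier is folded into the plugin's updateContext override, which dispatches on the numeric type. Roughly how that override is shaped (a sketch; the surrounding tokenizer plumbing and the other branches shown in the diff are elided):

// One method switches on the numeric token type instead of each token type
// owning its own updateContext callback.
updateContext(prevType) {
  const { context, type } = this.state;
  if (type === tt.jsxTagStart) {
    context.push(tc.j_expr, tc.j_oTag); // enter JSX expression + opening tag
  } else {
    super.updateContext(prevType); // defer to the base tokenizer's rules
  }
}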

View File

@@ -2,13 +2,11 @@
 import * as charCodes from "charcodes";

-import { types as tt, TokenType } from "../tokenizer/types";
+import { tokenLabelName, tt } from "../tokenizer/types";
 import type Parser from "../parser";
 import * as N from "../types";
 import { makeErrorTemplates, ErrorCodes } from "../parser/error";

-tt.placeholder = new TokenType("%%", { startsExpr: true });
-
 export type PlaceholderTypes =
   | "Identifier"
   | "StringLiteral"
@@ -288,7 +286,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
       if (this.isUnparsedContextual(next, "from")) {
         if (
           this.input.startsWith(
-            tt.placeholder.label,
+            tokenLabelName(tt.placeholder),
             this.nextTokenStartSince(next + 4),
           )
         ) {
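
For the same reason, the placeholders plugin can no longer bolt tt.placeholder = new TokenType("%%", ...) onto the token map at load time; the token is now defined centrally with the others, and its label is read back through the helper. Usage stays a one-line lookup (assuming the central table created tt.placeholder with the label "%%", as the old plugin-local code did):

// The old property access tt.placeholder.label becomes a function call
// over the numeric token type.
tokenLabelName(tt.placeholder); // => "%%"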

View File

@@ -7,7 +7,7 @@
 import type { TokenType } from "../../tokenizer/types";
 import type State from "../../tokenizer/state";
-import { types as tt } from "../../tokenizer/types";
+import { tokenOperatorPrecedence, tt } from "../../tokenizer/types";
 import { types as ct } from "../../tokenizer/context";
 import * as N from "../../types";
 import type { Position } from "../../util/location";
@@ -2195,7 +2195,7 @@ export default (superClass: Class<Parser>): Class<Parser> =>
       minPrec: number,
     ) {
       if (
-        nonNull(tt._in.binop) > minPrec &&
+        tokenOperatorPrecedence(tt._in) > minPrec &&
         !this.hasPrecedingLineBreak() &&
         this.isContextual("as")
       ) {
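
tokenOperatorPrecedence fits the same parallel-array scheme sketched after the commit summary. The old tt._in.binop field could be null, hence the nonNull wrapper; storing a sentinel such as -1 for non-operators lets the comparison fail naturally. A hedged sketch (the binop option name mirrors the old TokenType field; the array name and sentinel are assumptions):

// Binary-operator precedence keyed by the numeric token type; -1 marks
// non-operators, so `tokenOperatorPrecedence(t) > minPrec` is safe without
// a nonNull() guard.
const tokenBinops = [];
let counter = -1;
function createToken(label, { binop = -1 } = {}) {
  tokenBinops.push(binop);
  return ++counter;
}
const tt = { _in: createToken("in", { binop: 7 }) }; // `in` has precedence 7

function tokenOperatorPrecedence(token) {
  return tokenBinops[token];
}

tokenOperatorPrecedence(tt._in); // => 7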

View File

@@ -1,5 +1,5 @@
 import type Parser from "../parser";
-import { types as tt } from "../tokenizer/types";
+import { tt } from "../tokenizer/types";
 import * as N from "../types";

 export default (superClass: Class<Parser>): Class<Parser> =>