Rename "babylon" to "@babel/parser" (#7937) 🎉
committed by Henry Zhu
parent 0200a3e510
commit daf0ca8680
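The rename is intended to be source-compatible: consumers switch the package name and keep the same parse API. A minimal before/after sketch (assuming @babel/parser ^7.x is installed; the options shown are the standard public ones, not anything added in this commit):

// Before: const { parse } = require("babylon");
const { parse } = require("@babel/parser");

const ast = parse("const x = 1;", { sourceType: "module" });
console.log(ast.program.body[0].type); // "VariableDeclaration"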
124  packages/babel-parser/src/tokenizer/context.js  Normal file
@@ -0,0 +1,124 @@
// @flow

// The algorithm used to determine whether a regexp can appear at a
// given point in the program is loosely based on sweet.js' approach.
// See https://github.com/mozilla/sweet.js/wiki/design

import { types as tt } from "./types";
import { lineBreak } from "../util/whitespace";

export class TokContext {
  constructor(
    token: string,
    isExpr?: boolean,
    preserveSpace?: boolean,
    override?: Function, // Takes a Tokenizer as a this-parameter, and returns void.
  ) {
    this.token = token;
    this.isExpr = !!isExpr;
    this.preserveSpace = !!preserveSpace;
    this.override = override;
  }

  token: string;
  isExpr: boolean;
  preserveSpace: boolean;
  override: ?Function;
}

export const types: {
  [key: string]: TokContext,
} = {
  braceStatement: new TokContext("{", false),
  braceExpression: new TokContext("{", true),
  templateQuasi: new TokContext("${", true),
  parenStatement: new TokContext("(", false),
  parenExpression: new TokContext("(", true),
  template: new TokContext("`", true, true, p => p.readTmplToken()),
  functionExpression: new TokContext("function", true),
};

// Token-specific context update code

tt.parenR.updateContext = tt.braceR.updateContext = function() {
  if (this.state.context.length === 1) {
    this.state.exprAllowed = true;
    return;
  }

  const out = this.state.context.pop();
  if (
    out === types.braceStatement &&
    this.curContext() === types.functionExpression
  ) {
    this.state.context.pop();
    this.state.exprAllowed = false;
  } else if (out === types.templateQuasi) {
    this.state.exprAllowed = true;
  } else {
    this.state.exprAllowed = !out.isExpr;
  }
};

tt.name.updateContext = function(prevType) {
  if (this.state.value === "of" && this.curContext() === types.parenStatement) {
    this.state.exprAllowed = !prevType.beforeExpr;
    return;
  }

  this.state.exprAllowed = false;

  if (prevType === tt._let || prevType === tt._const || prevType === tt._var) {
    if (lineBreak.test(this.input.slice(this.state.end))) {
      this.state.exprAllowed = true;
    }
  }
  if (this.state.isIterator) {
    this.state.isIterator = false;
  }
};

tt.braceL.updateContext = function(prevType) {
  this.state.context.push(
    this.braceIsBlock(prevType) ? types.braceStatement : types.braceExpression,
  );
  this.state.exprAllowed = true;
};

tt.dollarBraceL.updateContext = function() {
  this.state.context.push(types.templateQuasi);
  this.state.exprAllowed = true;
};

tt.parenL.updateContext = function(prevType) {
  const statementParens =
    prevType === tt._if ||
    prevType === tt._for ||
    prevType === tt._with ||
    prevType === tt._while;
  this.state.context.push(
    statementParens ? types.parenStatement : types.parenExpression,
  );
  this.state.exprAllowed = true;
};

tt.incDec.updateContext = function() {
  // tokExprAllowed stays unchanged
};

tt._function.updateContext = function(prevType) {
  if (this.state.exprAllowed && !this.braceIsBlock(prevType)) {
    this.state.context.push(types.functionExpression);
  }

  this.state.exprAllowed = false;
};

tt.backQuote.updateContext = function() {
  if (this.curContext() === types.template) {
    this.state.context.pop();
  } else {
    this.state.context.push(types.template);
  }
  this.state.exprAllowed = false;
};
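For context on what the stack above buys the tokenizer: the same "/" character is read as a division operator or as the start of a regular expression depending on whether an expression is allowed at that position. An illustrative check (not part of this diff; it assumes @babel/parser is installed and that the `tokens: true` option exposes the raw token stream on the returned File node):

const { parse } = require("@babel/parser");

// Helper for this sketch only: collect the token type labels for a snippet.
function tokenLabels(code) {
  return parse(code, { tokens: true }).tokens.map(t => t.type.label);
}

console.log(tokenLabels("a / b"));       // includes "/" -- slash read as a binary operator
console.log(tokenLabels("if (x) /y/;")); // includes "regexp" -- slash starts a regexp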
1367  packages/babel-parser/src/tokenizer/index.js  Normal file
File diff suppressed because it is too large.
213  packages/babel-parser/src/tokenizer/state.js  Normal file
@@ -0,0 +1,213 @@
// @flow

import type { Options } from "../options";
import * as N from "../types";
import { Position } from "../util/location";

import { types as ct, type TokContext } from "./context";
import type { Token } from "./index";
import { types as tt, type TokenType } from "./types";

export default class State {
  init(options: Options, input: string): void {
    this.strict =
      options.strictMode === false ? false : options.sourceType === "module";

    this.input = input;

    this.potentialArrowAt = -1;

    this.noArrowAt = [];
    this.noArrowParamsConversionAt = [];

    this.inMethod = false;
    this.inFunction = false;
    this.inParameters = false;
    this.maybeInArrowParameters = false;
    this.inGenerator = false;
    this.inAsync = false;
    this.inPropertyName = false;
    this.inType = false;
    this.inClassProperty = false;
    this.noAnonFunctionType = false;
    this.hasFlowComment = false;
    this.isIterator = false;

    this.classLevel = 0;

    this.labels = [];

    this.decoratorStack = [[]];

    this.yieldInPossibleArrowParameters = null;

    this.tokens = [];

    this.comments = [];

    this.trailingComments = [];
    this.leadingComments = [];
    this.commentStack = [];
    // $FlowIgnore
    this.commentPreviousNode = null;

    this.pos = this.lineStart = 0;
    this.curLine = options.startLine;

    this.type = tt.eof;
    this.value = null;
    this.start = this.end = this.pos;
    this.startLoc = this.endLoc = this.curPosition();

    // $FlowIgnore
    this.lastTokEndLoc = this.lastTokStartLoc = null;
    this.lastTokStart = this.lastTokEnd = this.pos;

    this.context = [ct.braceStatement];
    this.exprAllowed = true;

    this.containsEsc = this.containsOctal = false;
    this.octalPosition = null;

    this.invalidTemplateEscapePosition = null;

    this.exportedIdentifiers = [];
  }

  // TODO
  strict: boolean;

  // TODO
  input: string;

  // Used to signify the start of a potential arrow function
  potentialArrowAt: number;

  // Used to signify the start of an expression which looks like a
  // typed arrow function, but it isn't
  // e.g. a ? (b) : c => d
  //          ^
  noArrowAt: number[];

  // Used to signify the start of an expression whose params, if it looks like
  // an arrow function, shouldn't be converted to assignable nodes.
  // This is used to defer the validation of typed arrow functions inside
  // conditional expressions.
  // e.g. a ? (b) : c => d
  //          ^
  noArrowParamsConversionAt: number[];

  // Flags to track whether we are in a function, a generator.
  inFunction: boolean;
  inParameters: boolean;
  maybeInArrowParameters: boolean;
  inGenerator: boolean;
  inMethod: boolean | N.MethodKind;
  inAsync: boolean;
  inType: boolean;
  noAnonFunctionType: boolean;
  inPropertyName: boolean;
  inClassProperty: boolean;
  hasFlowComment: boolean;
  isIterator: boolean;

  // Check whether we are in a (nested) class or not.
  classLevel: number;

  // Labels in scope.
  labels: Array<{ kind: ?("loop" | "switch"), statementStart?: number }>;

  // Leading decorators. Last element of the stack represents the decorators in current context.
  // Supports nesting of decorators, e.g. @foo(@bar class inner {}) class outer {}
  // where @foo belongs to the outer class and @bar to the inner
  decoratorStack: Array<Array<N.Decorator>>;

  // The first yield expression inside parenthesized expressions and arrow
  // function parameters. It is used to disallow yield in arrow function
  // parameters.
  yieldInPossibleArrowParameters: ?N.YieldExpression;

  // Token store.
  tokens: Array<Token | N.Comment>;

  // Comment store.
  comments: Array<N.Comment>;

  // Comment attachment store
  trailingComments: Array<N.Comment>;
  leadingComments: Array<N.Comment>;
  commentStack: Array<{
    start: number,
    leadingComments: ?Array<N.Comment>,
    trailingComments: ?Array<N.Comment>,
  }>;
  commentPreviousNode: N.Node;

  // The current position of the tokenizer in the input.
  pos: number;
  lineStart: number;
  curLine: number;

  // Properties of the current token:
  // Its type
  type: TokenType;

  // For tokens that include more information than their type, the value
  value: any;

  // Its start and end offset
  start: number;
  end: number;

  // And, if locations are used, the {line, column} object
  // corresponding to those offsets
  startLoc: Position;
  endLoc: Position;

  // Position information for the previous token
  lastTokEndLoc: Position;
  lastTokStartLoc: Position;
  lastTokStart: number;
  lastTokEnd: number;

  // The context stack is used to superficially track syntactic
  // context to predict whether a regular expression is allowed in a
  // given position.
  context: Array<TokContext>;
  exprAllowed: boolean;

  // Used to signal to callers of `readWord1` whether the word
  // contained any escape sequences. This is needed because words with
  // escape sequences must not be interpreted as keywords.
  containsEsc: boolean;

  // TODO
  containsOctal: boolean;
  octalPosition: ?number;

  // Names of exports store. `default` is stored as a name for both
  // `export default foo;` and `export { foo as default };`.
  exportedIdentifiers: Array<string>;

  invalidTemplateEscapePosition: ?number;

  curPosition(): Position {
    return new Position(this.curLine, this.pos - this.lineStart);
  }

  clone(skipArrays?: boolean): State {
    const state = new State();
    Object.keys(this).forEach(key => {
      // $FlowIgnore
      let val = this[key];

      if ((!skipArrays || key === "context") && Array.isArray(val)) {
        val = val.slice();
      }

      // $FlowIgnore
      state[key] = val;
    });
    return state;
  }
}
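One detail worth calling out in clone() above: arrays are copied (and "context" is always copied) so that a snapshot taken before speculative parsing is not corrupted when the live state keeps mutating its stacks. A self-contained sketch of that pattern (MiniState is a made-up stand-in, not the parser's State class):

class MiniState {
  constructor() {
    this.pos = 0;
    this.context = ["braceStatement"];
  }
  clone(skipArrays) {
    const state = new MiniState();
    Object.keys(this).forEach(key => {
      let val = this[key];
      // The context stack is always sliced so the snapshot owns its own copy.
      if ((!skipArrays || key === "context") && Array.isArray(val)) {
        val = val.slice();
      }
      state[key] = val;
    });
    return state;
  }
}

const live = new MiniState();
const snapshot = live.clone();       // taken before a speculative branch
live.context.push("parenExpression");
live.pos = 42;
console.log(snapshot.context);       // ["braceStatement"] -- unaffected by later pushes
console.log(snapshot.pos);           // 0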
198  packages/babel-parser/src/tokenizer/types.js  Normal file
@@ -0,0 +1,198 @@
// @flow

// ## Token types

// The assignment of fine-grained, information-carrying type objects
// allows the tokenizer to store the information it has about a
// token in a way that is very cheap for the parser to look up.

// All token type variables start with an underscore, to make them
// easy to recognize.

// The `beforeExpr` property is used to disambiguate between regular
// expressions and divisions. It is set on all token types that can
// be followed by an expression (thus, a slash after them would be a
// regular expression).
//
// `isLoop` marks a keyword as starting a loop, which is important
// to know when parsing a label, in order to allow or disallow
// continue jumps to that label.

const beforeExpr = true;
const startsExpr = true;
const isLoop = true;
const isAssign = true;
const prefix = true;
const postfix = true;

type TokenOptions = {
  keyword?: string,

  beforeExpr?: boolean,
  startsExpr?: boolean,
  rightAssociative?: boolean,
  isLoop?: boolean,
  isAssign?: boolean,
  prefix?: boolean,
  postfix?: boolean,
  binop?: ?number,
};

export class TokenType {
  label: string;
  keyword: ?string;
  beforeExpr: boolean;
  startsExpr: boolean;
  rightAssociative: boolean;
  isLoop: boolean;
  isAssign: boolean;
  prefix: boolean;
  postfix: boolean;
  binop: ?number;
  updateContext: ?(prevType: TokenType) => void;

  constructor(label: string, conf: TokenOptions = {}) {
    this.label = label;
    this.keyword = conf.keyword;
    this.beforeExpr = !!conf.beforeExpr;
    this.startsExpr = !!conf.startsExpr;
    this.rightAssociative = !!conf.rightAssociative;
    this.isLoop = !!conf.isLoop;
    this.isAssign = !!conf.isAssign;
    this.prefix = !!conf.prefix;
    this.postfix = !!conf.postfix;
    this.binop = conf.binop === 0 ? 0 : conf.binop || null;
    this.updateContext = null;
  }
}

class KeywordTokenType extends TokenType {
  constructor(name: string, options: TokenOptions = {}) {
    options.keyword = name;

    super(name, options);
  }
}

export class BinopTokenType extends TokenType {
  constructor(name: string, prec: number) {
    super(name, { beforeExpr, binop: prec });
  }
}

export const types: { [name: string]: TokenType } = {
  num: new TokenType("num", { startsExpr }),
  bigint: new TokenType("bigint", { startsExpr }),
  regexp: new TokenType("regexp", { startsExpr }),
  string: new TokenType("string", { startsExpr }),
  name: new TokenType("name", { startsExpr }),
  eof: new TokenType("eof"),

  // Punctuation token types.
  bracketL: new TokenType("[", { beforeExpr, startsExpr }),
  bracketR: new TokenType("]"),
  braceL: new TokenType("{", { beforeExpr, startsExpr }),
  braceBarL: new TokenType("{|", { beforeExpr, startsExpr }),
  braceR: new TokenType("}"),
  braceBarR: new TokenType("|}"),
  parenL: new TokenType("(", { beforeExpr, startsExpr }),
  parenR: new TokenType(")"),
  comma: new TokenType(",", { beforeExpr }),
  semi: new TokenType(";", { beforeExpr }),
  colon: new TokenType(":", { beforeExpr }),
  doubleColon: new TokenType("::", { beforeExpr }),
  dot: new TokenType("."),
  question: new TokenType("?", { beforeExpr }),
  questionDot: new TokenType("?."),
  arrow: new TokenType("=>", { beforeExpr }),
  template: new TokenType("template"),
  ellipsis: new TokenType("...", { beforeExpr }),
  backQuote: new TokenType("`", { startsExpr }),
  dollarBraceL: new TokenType("${", { beforeExpr, startsExpr }),
  at: new TokenType("@"),
  hash: new TokenType("#"),

  // Operators. These carry several kinds of properties to help the
  // parser use them properly (the presence of these properties is
  // what categorizes them as operators).
  //
  // `binop`, when present, specifies that this operator is a binary
  // operator, and will refer to its precedence.
  //
  // `prefix` and `postfix` mark the operator as a prefix or postfix
  // unary operator.
  //
  // `isAssign` marks all of `=`, `+=`, `-=` etcetera, which act as
  // binary operators with a very low precedence, that should result
  // in AssignmentExpression nodes.

  eq: new TokenType("=", { beforeExpr, isAssign }),
  assign: new TokenType("_=", { beforeExpr, isAssign }),
  incDec: new TokenType("++/--", { prefix, postfix, startsExpr }),
  bang: new TokenType("!", { beforeExpr, prefix, startsExpr }),
  tilde: new TokenType("~", { beforeExpr, prefix, startsExpr }),
  pipeline: new BinopTokenType("|>", 0),
  nullishCoalescing: new BinopTokenType("??", 1),
  logicalOR: new BinopTokenType("||", 1),
  logicalAND: new BinopTokenType("&&", 2),
  bitwiseOR: new BinopTokenType("|", 3),
  bitwiseXOR: new BinopTokenType("^", 4),
  bitwiseAND: new BinopTokenType("&", 5),
  equality: new BinopTokenType("==/!=", 6),
  relational: new BinopTokenType("</>", 7),
  bitShift: new BinopTokenType("<</>>", 8),
  plusMin: new TokenType("+/-", { beforeExpr, binop: 9, prefix, startsExpr }),
  modulo: new BinopTokenType("%", 10),
  star: new BinopTokenType("*", 10),
  slash: new BinopTokenType("/", 10),
  exponent: new TokenType("**", {
    beforeExpr,
    binop: 11,
    rightAssociative: true,
  }),
};

export const keywords = {
  break: new KeywordTokenType("break"),
  case: new KeywordTokenType("case", { beforeExpr }),
  catch: new KeywordTokenType("catch"),
  continue: new KeywordTokenType("continue"),
  debugger: new KeywordTokenType("debugger"),
  default: new KeywordTokenType("default", { beforeExpr }),
  do: new KeywordTokenType("do", { isLoop, beforeExpr }),
  else: new KeywordTokenType("else", { beforeExpr }),
  finally: new KeywordTokenType("finally"),
  for: new KeywordTokenType("for", { isLoop }),
  function: new KeywordTokenType("function", { startsExpr }),
  if: new KeywordTokenType("if"),
  return: new KeywordTokenType("return", { beforeExpr }),
  switch: new KeywordTokenType("switch"),
  throw: new KeywordTokenType("throw", { beforeExpr, prefix, startsExpr }),
  try: new KeywordTokenType("try"),
  var: new KeywordTokenType("var"),
  let: new KeywordTokenType("let"),
  const: new KeywordTokenType("const"),
  while: new KeywordTokenType("while", { isLoop }),
  with: new KeywordTokenType("with"),
  new: new KeywordTokenType("new", { beforeExpr, startsExpr }),
  this: new KeywordTokenType("this", { startsExpr }),
  super: new KeywordTokenType("super", { startsExpr }),
  class: new KeywordTokenType("class"),
  extends: new KeywordTokenType("extends", { beforeExpr }),
  export: new KeywordTokenType("export"),
  import: new KeywordTokenType("import", { startsExpr }),
  yield: new KeywordTokenType("yield", { beforeExpr, startsExpr }),
  null: new KeywordTokenType("null", { startsExpr }),
  true: new KeywordTokenType("true", { startsExpr }),
  false: new KeywordTokenType("false", { startsExpr }),
  in: new KeywordTokenType("in", { beforeExpr, binop: 7 }),
  instanceof: new KeywordTokenType("instanceof", { beforeExpr, binop: 7 }),
  typeof: new KeywordTokenType("typeof", { beforeExpr, prefix, startsExpr }),
  void: new KeywordTokenType("void", { beforeExpr, prefix, startsExpr }),
  delete: new KeywordTokenType("delete", { beforeExpr, prefix, startsExpr }),
};

// Map keyword names to token types.
Object.keys(keywords).forEach(name => {
  types["_" + name] = keywords[name];
});
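The binop numbers and rightAssociative flag above are what a precedence-climbing expression parser consumes. A simplified, self-contained sketch of that consumption (the OPS table and parseExpr below are illustrative, not the parser's internals; the precedence values mirror the table above, with +/- at 9, * and / at 10, and ** at 11 and right-associative):

const OPS = {
  "+": { binop: 9 },
  "-": { binop: 9 },
  "*": { binop: 10 },
  "/": { binop: 10 },
  "**": { binop: 11, rightAssociative: true },
};

function parseExpr(tokens, minPrec = 0) {
  let left = tokens.shift(); // operand (a bare number in this sketch)
  while (tokens.length) {
    const op = OPS[tokens[0]];
    if (!op || op.binop < minPrec) break;
    const label = tokens.shift();
    // Right-associative operators recurse at the same precedence so that
    // "a ** b ** c" groups as "a ** (b ** c)"; others require strictly higher.
    const right = parseExpr(tokens, op.rightAssociative ? op.binop : op.binop + 1);
    left = [label, left, right];
  }
  return left;
}

console.log(JSON.stringify(parseExpr(["2", "+", "3", "*", "4"])));
// ["+","2",["*","3","4"]]
console.log(JSON.stringify(parseExpr(["2", "**", "3", "**", "2"])));
// ["**","2",["**","3","2"]]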