Merge branch 'master' into implement-smart-pipeline-in-parser

* master: (222 commits)
  Set correct methods name
  Use toPropertyKey in the "decorate" helper
  Allow function types in type params within arrow return types (#8954)
  Fix message when plugin of a wrong type is passed (#8950)
  rename colliding let bindings with for loop init (#8937)
  edge incomplete support for arrow destructuring (babel #8349) (#8926)
  fix single-arg async arrows when retainLines=true (#8868)
  [flow] Explicit inexact objects with `...` (#8884)
  Update preset-env data (#8898)
  Treat break inside block inside loop (#8914)
  fixed "source map" formatting in comment (#8878) [skip ci]
  fix typo in contributing guidelines (#8901) [skip ci]
  fix: Expression x === 'y' && '' should not evaluate to undefined. (#8880)
  fixed an extra word
  Fixes #8865 (#8866)
  v7.1.4
  v7.1.3
  Bump Babel deps (#8770)
  flow-bin@0.82.0 (#8832)
  Insertafter jsx fix (#8833)
  ...

# Conflicts:
#	packages/babel-parser/src/tokenizer/index.js
#	packages/babel-parser/test/fixtures/experimental/class-private-properties/failure-numeric-literal/options.json
#	packages/babel-parser/test/fixtures/experimental/pipeline-operator/invalid-proposal/options.json
Author: mAAdhaTTah
Date: 2018-11-03 14:00:12 -04:00

1882 changed files with 21779 additions and 6653 deletions

View File

@@ -106,7 +106,7 @@ tt.incDec.updateContext = function() {
   // tokExprAllowed stays unchanged
 };
-tt._function.updateContext = function(prevType) {
+tt._function.updateContext = tt._class.updateContext = function(prevType) {
   if (this.state.exprAllowed && !this.braceIsBlock(prevType)) {
     this.state.context.push(types.functionExpression);
   }

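Note on the hunk above: master makes `class` reuse `function`'s context-update handler, so the tokenizer tracks a class in expression position the same way it tracks a function expression (which in turn affects how a following `{` or `/` is read). A loose, hypothetical illustration of the kind of input this concerns, using the published @babel/parser package rather than a test case from this merge:

const { parse } = require("@babel/parser");

// Both rely on the brace after the keyword being read as the start of a
// function/class body rather than a block statement.
parse("(function () {});");
parse("(class {});");
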
View File

@@ -16,7 +16,7 @@ import {
   lineBreak,
   lineBreakG,
   isNewLine,
-  nonASCIIwhitespace,
+  isWhitespace,
 } from "../util/whitespace";
 import State from "./state";
@@ -110,18 +110,6 @@ export class Token {
 // ## Tokenizer
-function codePointToString(code: number): string {
-  // UTF-16 Decoding
-  if (code <= 0xffff) {
-    return String.fromCharCode(code);
-  } else {
-    return String.fromCharCode(
-      ((code - 0x10000) >> 10) + 0xd800,
-      ((code - 0x10000) & 1023) + 0xdc00,
-    );
-  }
-}
 export default class Tokenizer extends LocationParser {
   // Forward-declarations
   // parser/util.js
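
Aside on the removal above: the hand-rolled UTF-16 encoder is replaced by the built-in String.fromCodePoint, used later in this file. A quick standalone check that the two agree for an astral code point:

// The removed helper's surrogate-pair arithmetic, applied to U+1F600:
const code = 0x1f600;
const manual = String.fromCharCode(
  ((code - 0x10000) >> 10) + 0xd800,
  ((code - 0x10000) & 1023) + 0xdc00,
);

console.log(manual === String.fromCodePoint(code)); // true ("😀")
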
@@ -226,7 +214,7 @@ export default class Tokenizer extends LocationParser {
     if (curContext.override) {
       curContext.override(this);
     } else {
-      this.readToken(this.fullCharCodeAtPos());
+      this.readToken(this.input.codePointAt(this.state.pos));
     }
   }
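
The decoding direction works the same way: `this.input.codePointAt(this.state.pos)` performs the surrogate-pair decoding that `fullCharCodeAtPos()` (removed in the next hunk) did by hand. A small standalone check:

const input = "a😀b";
const pos = 1; // index of the lead surrogate of 😀

// The removed helper's arithmetic for a lead surrogate:
const code = input.charCodeAt(pos); // 0xd83d
const next = input.charCodeAt(pos + 1); // 0xde00
const manual = (code << 10) + next - 0x35fdc00;

console.log(manual === input.codePointAt(pos)); // true (0x1f600)
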
@@ -240,14 +228,6 @@ export default class Tokenizer extends LocationParser {
     }
   }
-  fullCharCodeAtPos(): number {
-    const code = this.input.charCodeAt(this.state.pos);
-    if (code <= 0xd7ff || code >= 0xe000) return code;
-    const next = this.input.charCodeAt(this.state.pos + 1);
-    return (code << 10) + next - 0x35fdc00;
-  }
   pushComment(
     block: boolean,
     text: string,
@@ -331,11 +311,6 @@ export default class Tokenizer extends LocationParser {
     loop: while (this.state.pos < this.input.length) {
       const ch = this.input.charCodeAt(this.state.pos);
       switch (ch) {
-        case charCodes.space:
-        case charCodes.nonBreakingSpace:
-          ++this.state.pos;
-          break;
         case charCodes.carriageReturn:
           if (
             this.input.charCodeAt(this.state.pos + 1) === charCodes.lineFeed
@@ -367,11 +342,7 @@ export default class Tokenizer extends LocationParser {
           break;
         default:
-          if (
-            (ch > charCodes.backSpace && ch < charCodes.shiftOut) ||
-            (ch >= charCodes.oghamSpaceMark &&
-              nonASCIIwhitespace.test(String.fromCharCode(ch)))
-          ) {
+          if (isWhitespace(ch)) {
             ++this.state.pos;
           } else {
             break loop;
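
The two hunks above fold the tokenizer's inline whitespace checks (the `space`/`nonBreakingSpace` cases and the `default` branch condition) into a single `isWhitespace` predicate imported from `../util/whitespace`. Its body is not part of this diff; as a rough sketch, the removed checks together amount to something like the following, where the regex is an assumption modeled on the old `nonASCIIwhitespace` export:

// Assumed shape only; the real helper lives in
// packages/babel-parser/src/util/whitespace.js.
const nonASCIIwhitespace = /[\u1680\u2000-\u200a\u202f\u205f\u3000\ufeff]/;

function isWhitespace(ch) {
  return (
    ch === 0x20 || // space
    ch === 0xa0 || // non-breaking space
    (ch > 0x08 && ch < 0x0e) || // tab, LF, VT, FF, CR (backSpace < ch < shiftOut)
    (ch >= 0x1680 && nonASCIIwhitespace.test(String.fromCharCode(ch)))
  );
}

console.log(isWhitespace(0x20), isWhitespace(0x2009), isWhitespace(0x41)); // true true false
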
@@ -455,7 +426,7 @@ export default class Tokenizer extends LocationParser {
   readToken_slash(): void {
     // '/'
-    if (this.state.exprAllowed) {
+    if (this.state.exprAllowed && !this.state.inType) {
       ++this.state.pos;
       this.readRegexp();
       return;
@@ -845,7 +816,7 @@ export default class Tokenizer extends LocationParser {
     this.raise(
       this.state.pos,
-      `Unexpected character '${codePointToString(code)}'`,
+      `Unexpected character '${String.fromCodePoint(code)}'`,
     );
   }
@@ -887,7 +858,7 @@ export default class Tokenizer extends LocationParser {
     while (this.state.pos < this.input.length) {
       const char = this.input[this.state.pos];
-      const charCode = this.fullCharCodeAtPos();
+      const charCode = this.input.codePointAt(this.state.pos);
       if (VALID_REGEX_FLAGS.indexOf(char) > -1) {
         if (mods.indexOf(char) > -1) {
@@ -999,7 +970,7 @@ export default class Tokenizer extends LocationParser {
       }
     }
-    if (isIdentifierStart(this.fullCharCodeAtPos())) {
+    if (isIdentifierStart(this.input.codePointAt(this.state.pos))) {
       this.raise(this.state.pos, "Identifier directly after number");
     }
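
For reference, this guard (which appears in both numeric-literal paths, here and in the next hunk) is what rejects an identifier character glued to a number. A small hypothetical repro using the public @babel/parser API, not a fixture from this merge:

const { parse } = require("@babel/parser");

try {
  parse("3in x");
} catch (e) {
  console.log(e.message); // e.g. "Identifier directly after number (1:1)"
}
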
@@ -1055,7 +1026,7 @@ export default class Tokenizer extends LocationParser {
       }
     }
-    if (isIdentifierStart(this.fullCharCodeAtPos())) {
+    if (isIdentifierStart(this.input.codePointAt(this.state.pos))) {
       this.raise(this.state.pos, "Identifier directly after number");
     }
@@ -1132,6 +1103,7 @@ export default class Tokenizer extends LocationParser {
         (ch === charCodes.lineSeparator || ch === charCodes.paragraphSeparator)
       ) {
         ++this.state.pos;
+        ++this.state.curLine;
       } else if (isNewLine(ch)) {
         this.raise(this.state.start, "Unterminated string constant");
       } else {
@@ -1224,7 +1196,7 @@ export default class Tokenizer extends LocationParser {
       }
       case charCodes.lowercaseU: {
         const code = this.readCodePoint(throwOnInvalid);
-        return code === null ? null : codePointToString(code);
+        return code === null ? null : String.fromCodePoint(code);
       }
       case charCodes.lowercaseT:
         return "\t";
@@ -1302,7 +1274,7 @@ export default class Tokenizer extends LocationParser {
       first = true,
       chunkStart = this.state.pos;
     while (this.state.pos < this.input.length) {
-      const ch = this.fullCharCodeAtPos();
+      const ch = this.input.codePointAt(this.state.pos);
       if (isIdentifierChar(ch)) {
         this.state.pos += ch <= 0xffff ? 1 : 2;
       } else if (this.state.isIterator && ch === charCodes.atSign) {
@@ -1328,7 +1300,7 @@ export default class Tokenizer extends LocationParser {
         }
         // $FlowFixMe
-        word += codePointToString(esc);
+        word += String.fromCodePoint(esc);
         chunkStart = this.state.pos;
       } else {
         break;

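One more note on this file: in `readWord1` (the last two hunks), the cursor still advances by one or two UTF-16 code units per character; `codePointAt` only changes how the code point is read, not how `pos` moves. A sketch of that advance logic in isolation:

const input = "𝒜b"; // U+1D49C, an astral identifier character, then "b"
let pos = 0;

const ch = input.codePointAt(pos); // 0x1d49c
pos += ch <= 0xffff ? 1 : 2; // astral code point occupies two UTF-16 units

console.log(pos, input[pos]); // 2 "b"
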
View File

@@ -66,18 +66,12 @@ export class TokenType {
   }
 }
-class KeywordTokenType extends TokenType {
-  constructor(name: string, options: TokenOptions = {}) {
-    options.keyword = name;
-    super(name, options);
-  }
-}
+function KeywordTokenType(keyword: string, options: TokenOptions = {}) {
+  return new TokenType(keyword, { ...options, keyword });
+}
-export class BinopTokenType extends TokenType {
-  constructor(name: string, prec: number) {
-    super(name, { beforeExpr, binop: prec });
-  }
-}
+function BinopTokenType(name: string, binop: number) {
+  return new TokenType(name, { beforeExpr, binop });
+}
 export const types: { [name: string]: TokenType } = {
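
With the factory form, entries of the `types` table that the last context line opens are created with plain calls instead of `new`. Roughly, relying on the two factories defined in the hunk above; the specific entries are illustrative assumptions, not lines from this diff:

// Illustrative shape of table entries, not an excerpt from the diff:
const types = {
  relational: BinopTokenType("</>", 7),
  _in: KeywordTokenType("in", { beforeExpr: true, binop: 7 }),
};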