Added example for comments attachment.
* Fixed `onToken` & `tokenize` to return `startLoc` & `endLoc` in token object only when `options.locations` is `true`.
* Fixed `onToken` tests.
* Added example for generating comments with escodegen.
parent be11e04383
commit 1bf8c1420f
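For context on the first change above, here is a minimal sketch (not part of this commit; it assumes only the `parse`, `locations`, and `onToken` options shown in the diffs below, with acorn loaded as `acorn`) of how the shape of the token objects passed to `onToken` now depends on `options.locations`:

```javascript
var tokens = [];

// With `locations` left at its default (false), tokens carry only
// type, value, start and end.
acorn.parse("var x = 1;", {
  onToken: function (token) { tokens.push(token); }
});
console.log("startLoc" in tokens[0]); // false after this change

// With `locations: true`, each token additionally gets startLoc/endLoc.
tokens = [];
acorn.parse("var x = 1;", {
  locations: true,
  onToken: function (token) { tokens.push(token); }
});
console.log(tokens[0].startLoc); // e.g. {line: 1, column: 0}
```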
41 README.md
@@ -74,7 +74,7 @@ object referring to that same position.
   form. Default is `false`.

 - **onToken**: If a function is passed for this option, each found
-  token will be passed in format that `tokenize()` method provides.
+  token will be passed in the same format as `tokenize()` returns.

 - **onComment**: If a function is passed for this option, whenever a
   comment is encountered the function will be called with the
@@ -132,6 +132,45 @@ can't count on it staying stable.
 **tokTypes** holds an object mapping names to the token type objects
 that end up in the `type` properties of tokens.

+#### Note on using with [Escodegen][escodegen]
+
+Escodegen supports generating comments from the AST, provided they are
+attached in an Esprima-specific format. To simulate that format in
+Acorn, consider the following example (this may be simplified in the
+future):
+
+```javascript
+var comments = [], tokens = [];
+
+var ast = acorn.parse('var x = 42; // answer', {
+  // collect ranges for each node
+  ranges: true,
+  // collect comments in Esprima's format
+  onComment: function (block, text, start, end) {
+    comments.push({
+      type: block ? 'Block' : 'Line',
+      value: text,
+      range: [start, end]
+    });
+  },
+  // collect token ranges
+  onToken: function (token) {
+    tokens.push({
+      range: [token.start, token.end]
+    });
+  }
+});
+
+// attach comments using collected information
+escodegen.attachComments(ast, comments, tokens);
+
+// generate code
+console.log(escodegen.generate(ast, {comment: true}));
+// > 'var x = 42; // answer'
+```
+
+[escodegen]: https://github.com/Constellation/escodegen
+
 ### acorn_loose.js ###

 This file implements an error-tolerant parser. It exposes a single
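A brief usage note, not part of the diff: the README snippet above assumes that `acorn` and `escodegen` are both in scope. Under Node.js (assuming the npm packages of the same names), a minimal harness would be:

```javascript
// Hypothetical harness for the README example above; package names are assumed.
var acorn = require("acorn");
var escodegen = require("escodegen");
// ...then run the comment/token collection, attachComments and
// generate calls exactly as shown in the README example.
```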
31 acorn.js
@@ -76,7 +76,7 @@
     locations: false,
     // A function can be passed as `onToken` option, which will
     // cause Acorn to call that function with object in the same
-    // format as is used in tokenize(). Note that you are not
+    // format as tokenize() returns. Note that you are not
     // allowed to call the parser from the callback—that will
     // corrupt its internal state.
     onToken: null,
@@ -141,6 +141,20 @@
     return {line: line, column: offset - cur};
   };

+  var getCurrentToken = function () {
+    var token = {
+      type: tokType,
+      value: tokVal,
+      start: tokStart,
+      end: tokEnd
+    };
+    if (options.locations) {
+      token.startLoc = tokStartLoc;
+      token.endLoc = tokEndLoc;
+    }
+    return token;
+  };
+
   // Acorn is organized as a tokenizer and a recursive-descent parser.
   // The `tokenize` export provides an interface to the tokenizer.
   // Because the tokenizer is optimized for being efficiently used by
@@ -153,14 +167,10 @@
     setOptions(opts);
     initTokenState();

-    var t = {};
     function getToken(forceRegexp) {
       lastEnd = tokEnd;
       readToken(forceRegexp);
-      t.start = tokStart; t.end = tokEnd;
-      t.startLoc = tokStartLoc; t.endLoc = tokEndLoc;
-      t.type = tokType; t.value = tokVal;
-      return t;
+      return getCurrentToken();
     }
     getToken.jumpTo = function(pos, reAllowed) {
       tokPos = pos;
@@ -533,14 +543,7 @@
     tokVal = val;
     tokRegexpAllowed = type.beforeExpr;
     if (options.onToken) {
-      options.onToken({
-        start: tokStart,
-        end: tokEnd,
-        startLoc: tokStartLoc,
-        endLoc: tokEndLoc,
-        type: tokType,
-        value: tokVal
-      });
+      options.onToken(getCurrentToken());
     }
   }

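A brief illustration, not part of the diff: after this refactor both the `onToken` callback and the `tokenize()` interface build their token objects through `getCurrentToken`, so both honor `options.locations`, and `getToken()` now returns a fresh object per token instead of mutating a shared `t`. Assuming `acorn.tokenize(input, options)` returns the `getToken` function shown above, a tokenizer loop would look roughly like this:

```javascript
// Sketch only; relies on the tokenize() interface shown in the diff above.
var getToken = acorn.tokenize("var x = (1 + 2)", {locations: true});
var token;
do {
  token = getToken();
  // Because locations is true, startLoc/endLoc are present on every token.
  console.log(token.start, token.end, token.startLoc, token.endLoc, token.value);
} while (token.type !== acorn.tokTypes.eof);
```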
@@ -28658,80 +28658,76 @@ testFail("for(const x = 0;;);", "Unexpected token (1:4)", {ecmaVersion: 6});
 var actualTokens = [],
     expectedTokens = [
       {
+        type: tokTypes._var,
+        value: "var",
         start: 0,
         end: 3,
         startLoc: {line: 1, column: 0},
-        endLoc: {line: 1, column: 3},
-        type: tokTypes._var,
-        value: "var"
+        endLoc: {line: 1, column: 3}
       },
       {
+        type: tokTypes.name,
+        value: "x",
         start: 4,
         end: 5,
         startLoc: {line: 1, column: 4},
-        endLoc: {line: 1, column: 5},
-        type: tokTypes.name,
-        value: "x"
+        endLoc: {line: 1, column: 5}
       },
       {
+        type: tokTypes.eq,
+        value: "=",
         start: 6,
         end: 7,
         startLoc: {line: 1, column: 6},
-        endLoc: {line: 1, column: 7},
-        type: tokTypes.eq,
-        value: "="
+        endLoc: {line: 1, column: 7}
       },
       {
+        type: tokTypes.parenL,
+        value: undefined,
         start: 8,
         end: 9,
         startLoc: {line: 1, column: 8},
-        endLoc: {line: 1, column: 9},
-        type: tokTypes.parenL,
-        value: undefined
+        endLoc: {line: 1, column: 9}
       },
       {
+        type: tokTypes.num,
+        value: 1,
         start: 9,
         end: 10,
         startLoc: {line: 1, column: 9},
-        endLoc: {line: 1, column: 10},
-        type: tokTypes.num,
-        value: 1
+        endLoc: {line: 1, column: 10}
       },
       {
+        type: {binop: 9, prefix: true, beforeExpr: true},
+        value: "+",
         start: 11,
         end: 12,
         startLoc: {line: 1, column: 11},
-        endLoc: {line: 1, column: 12},
-        type: {
-          binop: 9,
-          prefix: true,
-          beforeExpr: true
-        },
-        value: "+"
+        endLoc: {line: 1, column: 12}
       },
       {
+        type: tokTypes.num,
+        value: 2,
         start: 13,
         end: 14,
         startLoc: {line: 1, column: 13},
-        endLoc: {line: 1, column: 14},
-        type: tokTypes.num,
-        value: 2
+        endLoc: {line: 1, column: 14}
       },
       {
+        type: tokTypes.parenR,
+        value: undefined,
         start: 14,
         end: 15,
         startLoc: {line: 1, column: 14},
-        endLoc: {line: 1, column: 15},
-        type: tokTypes.parenR,
-        value: undefined
+        endLoc: {line: 1, column: 15}
       },
       {
+        type: tokTypes.eof,
+        value: undefined,
         start: 15,
         end: 15,
         startLoc: {line: 1, column: 15},
-        endLoc: {line: 1, column: 15},
-        type: tokTypes.eof,
-        value: undefined
+        endLoc: {line: 1, column: 15}
       }
     ];
 testAssert('var x = (1 + 2)', function assert(ast) {