Skip to content

Commit

Permalink
Fix tracking of token position
Browse files Browse the repository at this point in the history
The line and offset were being dropped when entering into some
sub-tokenizers.
  • Loading branch information
xzyfer committed Dec 3, 2018
1 parent baef3a4 commit e4fca4e
Show file tree
Hide file tree
Showing 5 changed files with 34 additions and 25 deletions.
18 changes: 9 additions & 9 deletions __tests__/__snapshots__/interpolant.js.snap
Expand Up @@ -39,15 +39,15 @@ Array [
"number",
"10.00",
2,
10,
3,
2,
14,
7,
],
Array [
"endInterpolant",
"}",
2,
15,
8,
],
Array [
"newline",
Expand All @@ -66,15 +66,15 @@ Array [
"number",
".100",
3,
19,
3,
22,
3,
6,
],
Array [
"endInterpolant",
"}",
3,
23,
7,
],
Array [
"newline",
Expand Down Expand Up @@ -125,15 +125,15 @@ Array [
"number",
"123",
2,
8,
3,
2,
10,
5,
],
Array [
"endInterpolant",
"}",
2,
11,
6,
],
Array [
"newline",
Expand Down
8 changes: 5 additions & 3 deletions src/tokenize-comment.js
Expand Up @@ -15,7 +15,7 @@ let newline = '\n'.charCodeAt(0),
asterisk = '*'.charCodeAt(0),
wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\*(?=\/)|#(?={)/g;

export default function tokenize(input, l, p) {
export default function tokenize(input, l, p, o) {
let tokens = [];
let css = input.css.valueOf();

Expand All @@ -24,7 +24,7 @@ export default function tokenize(input, l, p) {
inInterpolant, inComment, inString;

let length = css.length;
let offset = -1;
let offset = o || -1;
let line = l || 1;
let pos = p || 0;

Expand Down Expand Up @@ -102,9 +102,11 @@ export default function tokenize(input, l, p) {
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
next = pos + 1;

let { tokens: t, pos: p } = tokenizeInterpolant(input, line, next + 1);
let { tokens: t, line: l, pos: p, offset: o } = tokenizeInterpolant(input, line, next + 1, offset);
tokens = tokens.concat(t);
next = p;
line = l;
offset = o;

pos = next;
break;
Expand Down
12 changes: 7 additions & 5 deletions src/tokenize-interpolant.js
Expand Up @@ -33,7 +33,7 @@ let singleQuote = "'".charCodeAt(0),
wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\/(?=\*)|#(?={)/g,
ident = /-?([a-z_]|\\[^\\])([a-z-_0-9]|\\[^\\])*/gi;

export default function tokenize(input, l, p) {
export default function tokenize(input, l, p, o) {
let tokens = [];
let css = input.css.valueOf();

Expand All @@ -42,7 +42,7 @@ export default function tokenize(input, l, p) {
inInterpolant, inComment, inString;

let length = css.length;
let offset = -1;
let offset = o || -1;
let line = l || 1;
let pos = p || 0;

Expand Down Expand Up @@ -135,9 +135,11 @@ export default function tokenize(input, l, p) {
tokens.push([quote, quote, line, pos - offset]);
next = pos + 1;

let { tokens: t, pos: p } = tokenizeString(input, line, next, quote);
let { tokens: t, line: l, pos: p, offset: o } = tokenizeString(input, line, next, offset, quote);
tokens = tokens.concat(t);
next = p;
line = l;
offset = o;

pos = next;
break;
Expand Down Expand Up @@ -178,7 +180,7 @@ export default function tokenize(input, l, p) {
tokens.push(['startComment', '/*', line, pos + 1 - offset]);
next = pos + 1;

let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1);
let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1, offset);
tokens = tokens.concat(t);
next = p;
line = l;
Expand Down Expand Up @@ -277,5 +279,5 @@ export default function tokenize(input, l, p) {
pos++;
}

return { tokens, pos };
return { tokens, line, pos, offset };
}
11 changes: 6 additions & 5 deletions src/tokenize-string.js
Expand Up @@ -18,7 +18,7 @@ let singleQuote = "'".charCodeAt(0),
sQuoteEnd = /([.\s]*?)[^\\](?=((#{)|'))/gm,
dQuoteEnd = /([.\s]*?)[^\\](?=((#{)|"))/gm;

export default function tokenize(input, l, p, quote) {
export default function tokenize(input, l, p, o, quote) {
let tokens = [];
let css = input.css.valueOf();

Expand All @@ -27,7 +27,7 @@ export default function tokenize(input, l, p, quote) {
inInterpolant, inComment, inString;

let length = css.length;
let offset = -1;
let offset = o || -1;
let line = l || 1;
let pos = p || 0;

Expand Down Expand Up @@ -81,12 +81,13 @@ export default function tokenize(input, l, p, quote) {
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
next = pos + 1;

let { tokens: t, pos: p } = tokenizeInterpolant(input, line, next + 1);
let { tokens: t, line: l, pos: p, offset: o } = tokenizeInterpolant(input, line, next + 1, offset);
tokens = tokens.concat(t);
next = p;
line = l;
offset = o;

pos = next;

} else {
quoteEnd.lastIndex = pos;
quoteEnd.test(css);
Expand All @@ -111,5 +112,5 @@ export default function tokenize(input, l, p, quote) {
pos++;
}

return { tokens, pos };
return { tokens, line, pos, offset };
}
10 changes: 7 additions & 3 deletions src/tokenize.js
Expand Up @@ -139,9 +139,11 @@ export default function tokenize(input, l, p) {
tokens.push([quote, quote, line, pos - offset]);
next = pos + 1;

let { tokens: t, pos: p } = tokenizeString(input, line, next, quote);
let { tokens: t, line: l, pos: p, offset: o } = tokenizeString(input, line, next, offset, quote);
tokens = tokens.concat(t);
next = p;
line = l;
offset = o;

pos = next;
break;
Expand Down Expand Up @@ -182,7 +184,7 @@ export default function tokenize(input, l, p) {
tokens.push(['startComment', '/*', line, pos + 1 - offset]);
next = pos + 1;

let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1);
let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1, offset);
tokens = tokens.concat(t);
next = p;
line = l;
Expand Down Expand Up @@ -214,9 +216,11 @@ export default function tokenize(input, l, p) {
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
next = pos + 1;

let { tokens: t, pos: p } = tokenizeInterpolant(input, line, next + 1);
let { tokens: t, line: l, pos: p, offset: o } = tokenizeInterpolant(input, line, next + 1, offset);
tokens = tokens.concat(t);
next = p;
line = l;
offset = o;

pos = next;
break;
Expand Down

0 comments on commit e4fca4e

Please sign in to comment.