Skip to content

Commit

Permalink
Add extra newlines in pretty token streams (PrismJS#2070)
Browse files Browse the repository at this point in the history
The formatter for the simplified token streams of our test cases will now emit extra (empty) newlines if the tested code also has an empty line at those positions.
  • Loading branch information
RunDevelopment authored and quentinvernot committed Sep 11, 2020
1 parent fef72d1 commit 8b45d2c
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 24 deletions.
4 changes: 2 additions & 2 deletions tests/helper/prism-loader.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ const coreChecks = require('./checks');

/**
* @typedef PrismLoaderContext
* @property {any} Prism The Prism instance.
* @property {import('../../components/prism-core')} Prism The Prism instance.
* @property {Set<string>} loaded A set of loaded components.
*/

Expand All @@ -27,7 +27,7 @@ module.exports = {
* Creates a new Prism instance with the given language loaded
*
* @param {string|string[]} languages
* @returns {Prism}
* @returns {import('../../components/prism-core')}
*/
createInstance(languages) {
let context = {
Expand Down
2 changes: 1 addition & 1 deletion tests/helper/test-case.js
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ module.exports = {
*
* The `before-tokenize` and `after-tokenize` hooks will also be executed.
*
* @param {any} Prism The Prism instance which will tokenize `code`.
* @param {import('../../components/prism-core')} Prism The Prism instance which will tokenize `code`.
* @param {string} code The code to tokenize.
* @param {string} language The language id.
* @returns {Array<string|Array<string|any[]>>}
Expand Down
62 changes: 44 additions & 18 deletions tests/helper/token-stream-transformer.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@ module.exports = {
/**
* @typedef TokenStreamItem
* @property {string} type
* @property {string | TokenStreamItem | Array<string|TokenStreamItem>} content
* @property {string | Array<string|TokenStreamItem>} content
*
* @typedef {Array<string | [string, string | Array]>} SimplifiedTokenStream
*/

/**
Expand All @@ -15,29 +17,46 @@ module.exports = {
* * In arrays each value is transformed individually
* Values that are empty (empty arrays or strings only containing whitespace) are removed
*
* @param {string | TokenStreamItem | Array<string|TokenStreamItem>} tokenStream
* @returns {Array<string|Array<string|any[]>>}
* @param {Array<string|TokenStreamItem>} tokenStream
* @returns {SimplifiedTokenStream}
*/
simplify(tokenStream) {
if (Array.isArray(tokenStream)) {
return tokenStream
.map(value => this.simplify(value))
.filter(value => {
return !(Array.isArray(value) && !value.length) && !(typeof value === "string" && !value.trim().length);
});
}
else if (typeof tokenStream === "object") {
return [tokenStream.type, this.simplify(tokenStream.content)];
}
else {
return tokenStream;
simplify: function simplify(tokenStream) {
return tokenStream
.map(innerSimple)
.filter((value, i, arr) => {
if (typeof value === "string" && !value.trim().length) {
// string contains only spaces
if (i > 0 && i < arr.length - 1 && value.split(/\r\n?|\n/g).length > 2) {
// in a valid token stream there are no adjacent strings, so we know that the previous
// element is a (simplified) token
arr[i - 1]['newline-after'] = true;
}
return false;
}
return true;
});

/**
* @param {string | TokenStreamItem} value
* @returns {string | [string, string | Array]}
*/
function innerSimple(value) {
if (typeof value === "object") {
if (Array.isArray(value.content)) {
return [value.type, simplify(value.content)];
} else {
return [value.type, value.content];
}
} else {
return value;
}
}
},

/**
*
* @param {ReadonlyArray<string|ReadonlyArray<string|any[]>>} tokenStream
* @param {number} [indentationLevel=0]
* @param {Readonly<SimplifiedTokenStream>} tokenStream
* @param {number} [indentationLevel]
*/
prettyprint(tokenStream, indentationLevel = 1) {
const indentChar = ' ';
Expand All @@ -49,6 +68,7 @@ module.exports = {
out += "[\n"
tokenStream.forEach((item, i) => {
out += indentation;
let extraNewline = false;

if (typeof item === 'string') {
out += JSON.stringify(item);
Expand All @@ -65,10 +85,16 @@ module.exports = {
}

out += ']';

extraNewline = !!item['newline-after'];
}

const lineEnd = (i === tokenStream.length - 1) ? '\n' : ',\n';
out += lineEnd;

if (extraNewline) {
out += '\n';
}
})
out += indentation.substr(indentChar.length) + ']'
return out;
Expand Down
6 changes: 3 additions & 3 deletions tests/testrunner-tests.js
Original file line number Diff line number Diff line change
Expand Up @@ -68,12 +68,12 @@ describe("The token stream transformer", function () {
{
type: "type",
content: [
["", ""],
"",
{ type: "nested", content: [""] }
{ type: "nested", content: [""] },
""
]
},
[[[[[[[""]]]]]]]
""
];

const expectedSimplified = [
Expand Down

0 comments on commit 8b45d2c

Please sign in to comment.