
Commit 8bfcc81

Tests: Better pretty print (#2600)
This is a major improvement for the pretty-printer. The main change is that `TokenStreamTransformer.prettyprint` now takes a Prism token stream instead of a simplified token stream. This means that it has a lot more information to work with, and it uses that information. It mostly behaves like the old pretty-printer, with 3 main differences:

1. Tokens that contain a nested token stream consisting of only a single string are printed on one line. Example: `["namespace", ["java"]]`
2. The pretty-printer can now glue tokens together so that they are printed on a single line. Example: `["number", "100"], ["operator", ".."], ["number", "100"]`
3. Line breaks in the input code may now cause empty lines in the output. Previously, only blank strings containing at least 2 line breaks produced empty lines. This helps readability as it groups tokens together.
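
To make differences 1 and 2 concrete, the pretty-printed expected token stream for the examples above might change roughly like this (an illustrative sketch only; the exact spacing is decided by the pretty-printer):

Before:

	[
		["namespace", [
			"java"
		]],
		["number", "100"],
		["operator", ".."],
		["number", "100"]
	]

After:

	[
		["namespace", ["java"]],
		["number", "100"], ["operator", ".."], ["number", "100"]
	]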
1 parent f0e2b70 commit 8bfcc81

4 files changed: +435 -54 lines changed

tests/core/greedy.js

Lines changed: 2 additions & 1 deletion
@@ -3,13 +3,14 @@
 const { assert } = require('chai');
 const PrismLoader = require('../helper/prism-loader');
 const TestCase = require('../helper/test-case');
+const TokenStreamTransformer = require('../helper/token-stream-transformer');


 function testTokens({ grammar, code, expected }) {
 	const Prism = PrismLoader.createEmptyPrism();
 	Prism.languages.test = grammar;

-	const simpleTokens = TestCase.simpleTokenize(Prism, code, 'test');
+	const simpleTokens = TokenStreamTransformer.simplify(TestCase.tokenize(Prism, code, 'test'));

 	assert.deepStrictEqual(simpleTokens, expected);
 }
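
For context, the updated helper is used by the greedy tests roughly as follows; this is a minimal, hypothetical sketch (the grammar, code, and expected stream are made up and not part of this commit):

	testTokens({
		grammar: {
			'number': /\d+/
		},
		code: '42 foo',
		expected: [
			['number', '42'],
			' foo'
		]
	});

Here `TokenStreamTransformer.simplify` reduces each Prism token to a `[type, content]` pair, which is the simplified shape the `expected` value is written in.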

tests/helper/test-case.js

Lines changed: 15 additions & 8 deletions
@@ -5,6 +5,10 @@ const { assert } = require("chai");
 const PrismLoader = require("./prism-loader");
 const TokenStreamTransformer = require("./token-stream-transformer");

+/**
+ * @typedef {import("./token-stream-transformer").TokenStream} TokenStream
+ */
+
 /**
  * Handles parsing of a test case file.
  *
@@ -58,7 +62,7 @@ module.exports = {
 		const Prism = PrismLoader.createInstance(usedLanguages.languages);

 		// the first language is the main language to highlight
-		const simplifiedTokenStream = this.simpleTokenize(Prism, testCase.code, usedLanguages.mainLanguage);
+		const tokenStream = this.tokenize(Prism, testCase.code, usedLanguages.mainLanguage);

 		if (testCase.expectedTokenStream === null) {
 			// the test case doesn't have an expected value
@@ -71,7 +75,7 @@ module.exports = {
 			// change the file
 			const lineEnd = (/\r\n/.test(testCase.code) || !/\n/.test(testCase.code)) ? '\r\n' : '\n';
 			const separator = "\n\n----------------------------------------------------\n\n";
-			const pretty = TokenStreamTransformer.prettyprint(simplifiedTokenStream)
+			const pretty = TokenStreamTransformer.prettyprint(tokenStream)
 				.replace(/^( +)/gm, m => {
 					return "\t".repeat(m.length / 4);
 				});
@@ -80,11 +84,14 @@ module.exports = {
 			if (testCase.comment) {
 				content += separator + testCase.comment;
 			}
+			//content += '\n'
 			content = content.replace(/\r?\n/g, lineEnd);

 			fs.writeFileSync(filePath, content, "utf-8");
 		} else {
 			// there is an expected value
+			const simplifiedTokenStream = TokenStreamTransformer.simplify(tokenStream);
+
 			const actual = JSON.stringify(simplifiedTokenStream);
 			const expected = JSON.stringify(testCase.expectedTokenStream);

@@ -100,7 +107,7 @@ module.exports = {
 			const columnNumber = expectedJsonLines.pop().length + 1;
 			const lineNumber = testCase.expectedLineOffset + expectedJsonLines.length;

-			const tokenStreamStr = TokenStreamTransformer.prettyprint(simplifiedTokenStream);
+			const tokenStreamStr = TokenStreamTransformer.prettyprint(tokenStream);
 			const message = "\n\nActual Token Stream:" +
 				"\n-----------------------------------------\n" +
 				tokenStreamStr +
@@ -112,16 +119,16 @@ module.exports = {
 	},

 	/**
-	 * Returns the simplified token stream of the given code highlighted with `language`.
+	 * Returns the token stream of the given code highlighted with `language`.
 	 *
 	 * The `before-tokenize` and `after-tokenize` hooks will also be executed.
 	 *
 	 * @param {import('../../components/prism-core')} Prism The Prism instance which will tokenize `code`.
 	 * @param {string} code The code to tokenize.
 	 * @param {string} language The language id.
-	 * @returns {Array<string|Array<string|any[]>>}
+	 * @returns {TokenStream}
 	 */
-	simpleTokenize(Prism, code, language) {
+	tokenize(Prism, code, language) {
 		const env = {
 			code,
 			grammar: Prism.languages[language],
@@ -132,7 +139,7 @@ module.exports = {
 		env.tokens = Prism.tokenize(env.code, env.grammar);
 		Prism.hooks.run('after-tokenize', env);

-		return TokenStreamTransformer.simplify(env.tokens);
+		return env.tokens;
 	},


@@ -202,7 +209,7 @@ module.exports = {

 		const code = testCaseParts[0].trim();
 		const expected = (testCaseParts[1] || '').trim();
-		const comment = (testCaseParts[2] || '').trim();
+		const comment = (testCaseParts[2] || '').trimStart();

 		const testCase = {
 			code,
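
Taken together, the new flow in the test runner looks roughly like this (a condensed sketch of the diff above, not the verbatim implementation):

	// full Prism token stream, including nested tokens and whitespace strings
	const tokenStream = this.tokenize(Prism, testCase.code, usedLanguages.mainLanguage);

	// pretty-printing now works on the full stream, so it can glue tokens and keep line breaks
	const pretty = TokenStreamTransformer.prettyprint(tokenStream);

	// assertions still compare the simplified [type, content] form against the expected value
	const simplifiedTokenStream = TokenStreamTransformer.simplify(tokenStream);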
