Inline tokens #1627

Merged 11 commits, Apr 8, 2020. Changes shown from 9 commits.
1 change: 1 addition & 0 deletions .eslintrc.json
@@ -17,6 +17,7 @@
"one-var": "off",
"no-control-regex": "off",
"no-prototype-builtins": "off",
"no-extra-semi": "error",

"prefer-const": "error",
"no-var": "error"
109 changes: 83 additions & 26 deletions docs/USING_PRO.md
@@ -4,7 +4,7 @@ To champion the single-responsibility and open/closed principles, we have tried

<h2 id="renderer">The renderer</h2>

-The renderer is...
+The renderer defines the output of the parser.

**Example:** Overriding default heading token by adding an embedded anchor tag like on GitHub.

@@ -29,7 +29,7 @@ renderer.heading = function (text, level) {
};

// Run marked
-console.log(marked('# heading+', { renderer: renderer }));
+console.log(marked('# heading+', { renderer }));
```

**Output:**
@@ -91,12 +91,11 @@ slugger.slug('foo-1') // foo-1-2

<h2 id="lexer">The lexer</h2>

-The lexer is...
-
+The lexer turns a markdown string into tokens.

<h2 id="parser">The parser</h2>

-The parser is...
+The parser takes tokens as input and calls the renderer functions.

***

@@ -105,30 +104,46 @@ The parser is...
You also have direct access to the lexer and parser if you so desire.

``` js
-const tokens = marked.lexer(text, options);
+const tokens = marked.lexer(markdown, options);
console.log(marked.parser(tokens, options));
```

``` js
const lexer = new marked.Lexer(options);
-const tokens = lexer.lex(text);
+const tokens = lexer.lex(markdown);
console.log(tokens);
-console.log(lexer.rules);
+console.log(lexer.rules.block); // block level rules
+console.log(lexer.rules.inline); // inline level rules
```

``` bash
$ node
-> require('marked').lexer('> i am using marked.')
-[ { type: 'blockquote_start' },
-  { type: 'paragraph',
-    text: 'i am using marked.' },
-  { type: 'blockquote_end' },
-  links: {} ]
+> require('marked').lexer('> I am using marked.')
+[
+  {
+    type: "blockquote",
+    raw: "> I am using marked.",
+    tokens: [
+      {
+        type: "paragraph",
+        raw: "I am using marked.",
+        text: "I am using marked.",
+        tokens: [
+          {
+            type: "text",
+            raw: "I am using marked.",
+            text: "I am using marked."
+          }
+        ]
+      }
+    ]
+  },
+  links: {}
+]
```
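
The nesting above is the heart of this PR: inline tokens now hang off their parent block tokens. As a minimal sketch (not part of this diff), a recursive walker makes the tree shape easy to inspect; it assumes only the `tokens` arrays shown above, plus the `items` arrays that list tokens use for their children (the worker change further down handles both the same way):

``` js
const marked = require('marked');

// Hypothetical helper, for illustration only: print each token's type,
// indented by its depth in the token tree.
function walkTokens(tokens, depth = 0) {
  for (const token of tokens) {
    console.log(' '.repeat(2 * depth) + token.type);
    if (token.tokens) {        // nested inline/block tokens
      walkTokens(token.tokens, depth + 1);
    }
    if (token.items) {         // list tokens nest children under `items`
      walkTokens(token.items, depth + 1);
    }
  }
}

walkTokens(marked.lexer('> I am using marked.'));
// blockquote
//   paragraph
//     text
```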

-The Lexers build an array of tokens, which will be passed to their respective
-Parsers. The Parsers process each token in the token arrays,
-which are removed from the array of tokens:
+The Lexer builds an array of tokens, which will be passed to the Parser.
+The Parser processes each token in the token array:

``` js
const marked = require('marked');
@@ -146,18 +161,60 @@ console.log(tokens);

const html = marked.parser(tokens);
console.log(html);

+console.log(tokens);
```

``` bash
-[ { type: 'heading', depth: 1, text: 'heading' },
-  { type: 'paragraph', text: ' [link][1]' },
-  { type: 'space' },
-  links: { '1': { href: '#heading', title: 'heading' } } ]

+[
+  {
+    type: "heading",
+    raw: "  # heading\n\n",
+    depth: 1,
+    text: "heading",
+    tokens: [
+      {
+        type: "text",
+        raw: "heading",
+        text: "heading"
+      }
+    ]
+  },
+  {
+    type: "paragraph",
+    raw: "  [link][1]",
+    text: "  [link][1]",
+    tokens: [
+      {
+        type: "text",
+        raw: "  ",
+        text: "  "
+      },
+      {
+        type: "link",
+        raw: "[link][1]",
+        text: "link",
+        href: "#heading",
+        title: "heading",
+        tokens: [
+          {
+            type: "text",
+            raw: "link",
+            text: "link"
+          }
+        ]
+      }
+    ]
+  },
+  {
+    type: "space",
+    raw: "\n\n"
+  },
+  links: {
+    "1": {
+      href: "#heading",
+      title: "heading"
+    }
+  }
+]
<h1 id="heading">heading</h1>
<p> <a href="#heading" title="heading">link</a></p>

+[ links: { '1': { href: '#heading', title: 'heading' } } ]
```
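
One detail worth noting in the dump above (an observation about the output, not part of the diff): `links` is not an element of the token array but a property attached to the array object itself, which is why it still prints after parsing has drained the tokens. A minimal sketch of reading it:

``` js
const marked = require('marked');

// Illustration only: the reference-link map rides on the array object,
// not inside it, so iteration skips it but property access works.
const tokens = marked.lexer('[link][1]\n\n[1]: #heading "heading"');
console.log(tokens.links['1']);
// { href: '#heading', title: 'heading' }
```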
14 changes: 7 additions & 7 deletions docs/demo/demo.js
@@ -183,7 +183,7 @@ function handleIframeLoad() {

function handleInput() {
inputDirty = true;
-};
+}

function handleVersionChange() {
if ($markedVerElem.value === 'commit' || $markedVerElem.value === 'pr') {
@@ -256,7 +256,7 @@ function handleChange(panes, visiblePane) {
}
}
return active;
-};
+}

function addCommitVersion(value, text, commit) {
if (markedVersions[value]) {
@@ -331,13 +331,13 @@ function jsonString(input) {
.replace(/[\\"']/g, '\\$&')
.replace(/\u0000/g, '\\0');
return '"' + output + '"';
-};
+}

function getScrollSize() {
var e = $activeOutputElem;

return e.scrollHeight - e.clientHeight;
-};
+}

function getScrollPercent() {
var size = getScrollSize();
@@ -347,11 +347,11 @@ function getScrollPercent() {
}

return $activeOutputElem.scrollTop / size;
-};
+}

function setScrollPercent(percent) {
$activeOutputElem.scrollTop = percent * getScrollSize();
-};
+}

function updateLink() {
var outputType = '';
Expand Down Expand Up @@ -446,7 +446,7 @@ function checkForChanges() {
}
}
checkChangeTimeout = window.setTimeout(checkForChanges, delayTime);
-};
+}

function setResponseTime(ms) {
var amount = ms;
41 changes: 29 additions & 12 deletions docs/demo/worker.js
@@ -1,4 +1,5 @@
/* globals marked, unfetch, ES6Promise, Promise */ // eslint-disable-line no-redeclare

if (!self.Promise) {
self.importScripts('https://cdn.jsdelivr.net/npm/es6-promise/dist/es6-promise.js');
self.Promise = ES6Promise;
@@ -48,28 +49,44 @@ function parse(e) {
case 'parse':
var startTime = new Date();
var lexed = marked.lexer(e.data.markdown, e.data.options);
-var lexedList = [];
-for (var i = 0; i < lexed.length; i++) {
-  var lexedLine = [];
-  for (var j in lexed[i]) {
-    lexedLine.push(j + ':' + jsonString(lexed[i][j]));
-  }
-  lexedList.push('{' + lexedLine.join(', ') + '}');
-}
+var lexedList = getLexedList(lexed);
var parsed = marked.parser(lexed, e.data.options);
var endTime = new Date();
// setTimeout(function () {
postMessage({
task: e.data.task,
-lexed: lexedList.join('\n'),
+lexed: lexedList,
parsed: parsed,
time: endTime - startTime
});
// }, 10000);
break;
}
}

+function getLexedList(lexed, level) {
+  level = level || 0;
+  var lexedList = [];
+  for (var i = 0; i < lexed.length; i++) {
+    var lexedLine = [];
+    for (var j in lexed[i]) {
+      if (j === 'tokens' || j === 'items') {
+        lexedLine.push(j + ': [\n' + getLexedList(lexed[i][j], level + 1) + '\n]');
+      } else {
+        lexedLine.push(j + ':' + jsonString(lexed[i][j]));
+      }
+    }
+    lexedList.push(stringRepeat(' ', 2 * level) + '{' + lexedLine.join(', ') + '}');
+  }
+  return lexedList.join('\n');
+}
+
+function stringRepeat(char, times) {
+  var s = '';
+  for (var i = 0; i < times; i++) {
+    s += char;
+  }
+  return s;
+}

function jsonString(input) {
var output = (input + '')
.replace(/\n/g, '\\n')
@@ -79,7 +96,7 @@ function jsonString(input) {
.replace(/[\\"']/g, '\\$&')
.replace(/\u0000/g, '\\0');
return '"' + output + '"';
-};
+}

function loadVersion(ver) {
var promise;