Skip to content

Commit 994b2e6

Browse files
UziTech and styfle authored
feat: add async option (#2474)
* fix: return values from walkTokens
* docs: add async docs
* test: add async test
* docs: add nav to async
* Update docs/USING_PRO.md (Co-authored-by: Steven <[email protected]>)
* test: expect promise
* Update docs/USING_ADVANCED.md (Co-authored-by: Steven <[email protected]>)

Co-authored-by: Steven <[email protected]>
1 parent 33724a3 commit 994b2e6

File tree

8 files changed

+215
-171
lines changed

8 files changed

+215
-171
lines changed

docs/USING_ADVANCED.md

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -44,6 +44,7 @@ console.log(marked.parse(markdownString));
4444

4545
|Member |Type |Default |Since |Notes |
4646
|:-----------|:---------|:--------|:--------|:-------------|
47+
|async |`boolean` |`false` |4.1.0 |If true, `walkTokens` functions can be async and `marked.parse` will return a promise that resolves when all walk tokens functions resolve.|
4748
|baseUrl |`string` |`null` |0.3.9 |A prefix url for any relative link. |
4849
|breaks |`boolean` |`false` |v0.2.7 |If true, add `<br>` on a single line break (copies GitHub behavior on comments, but not on rendered markdown files). Requires `gfm` be `true`.|
4950
|gfm |`boolean` |`true` |v0.2.1 |If true, use approved [GitHub Flavored Markdown (GFM) specification](https://github.github.com/gfm/).|

docs/USING_PRO.md

Lines changed: 72 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -438,6 +438,78 @@ console.log(marked.parse('A Description List:\n'
438438
439439
***
440440
441+
<h2 id="async">Async Marked : <code>async</code></h2>
442+
443+
Marked will return a promise if the `async` option is true. The `async` option will tell marked to await any `walkTokens` functions before parsing the tokens and returning an HTML string.
444+
445+
Simple Example:
446+
447+
```js
448+
const walkTokens = async (token) => {
449+
if (token.type === 'link') {
450+
try {
451+
await fetch(token.href);
452+
} catch (ex) {
453+
token.title = 'invalid';
454+
}
455+
}
456+
};
457+
458+
marked.use({ walkTokens, async: true });
459+
460+
const markdown = `
461+
[valid link](https://example.com)
462+
463+
[invalid link](https://invalidurl.com)
464+
`;
465+
466+
const html = await marked.parse(markdown);
467+
```
468+
469+
Custom Extension Example:
470+
471+
```js
472+
const importUrl = {
473+
extensions: [{
474+
name: 'importUrl',
475+
level: 'block',
476+
start(src) { return src.indexOf('\n:'); },
477+
tokenizer(src) {
478+
const rule = /^:(https?:\/\/.+?):/;
479+
const match = rule.exec(src);
480+
if (match) {
481+
return {
482+
type: 'importUrl',
483+
raw: match[0],
484+
url: match[1],
485+
html: '' // will be replaced in walkTokens
486+
};
487+
}
488+
},
489+
renderer(token) {
490+
return token.html;
491+
}
492+
}],
493+
async: true, // needed to tell marked to return a promise
494+
async walkTokens(token) {
495+
if (token.type === 'importUrl') {
496+
const res = await fetch(token.url);
497+
token.html = await res.text();
498+
}
499+
}
500+
};
501+
502+
marked.use(importUrl);
503+
504+
const markdown = `
505+
# example.com
506+
507+
:https://example.com:
508+
`;
509+
510+
const html = await marked.parse(markdown);
511+
```
512+
441513
<h2 id="lexer">The Lexer</h2>
442514
443515
The lexer takes a markdown string and calls the tokenizer functions.

docs/_document.html

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -51,6 +51,7 @@ <h1>Marked Documentation</h1>
5151
<li><a href="/using_pro#tokenizer">Tokenizer</a></li>
5252
<li><a href="/using_pro#walk-tokens">Walk Tokens</a></li>
5353
<li><a href="/using_pro#extensions">Custom Extensions</a></li>
54+
<li><a href="/using_pro#async">Async Marked</a></li>
5455
<li><a href="/using_pro#lexer">Lexer</a></li>
5556
<li><a href="/using_pro#parser">Parser</a></li>
5657
</ul>

src/Tokenizer.js

Lines changed: 21 additions & 30 deletions
Original file line number · Diff line number · Diff line change
@@ -19,7 +19,7 @@ function outputLink(cap, link, raw, lexer) {
1919
href,
2020
title,
2121
text,
22-
tokens: lexer.inlineTokens(text, [])
22+
tokens: lexer.inlineTokens(text)
2323
};
2424
lexer.state.inLink = false;
2525
return token;
@@ -125,15 +125,13 @@ export class Tokenizer {
125125
}
126126
}
127127

128-
const token = {
128+
return {
129129
type: 'heading',
130130
raw: cap[0],
131131
depth: cap[1].length,
132132
text,
133-
tokens: []
133+
tokens: this.lexer.inline(text)
134134
};
135-
this.lexer.inline(token.text, token.tokens);
136-
return token;
137135
}
138136
}
139137

@@ -355,10 +353,10 @@ export class Tokenizer {
355353
text: cap[0]
356354
};
357355
if (this.options.sanitize) {
356+
const text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
358357
token.type = 'paragraph';
359-
token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
360-
token.tokens = [];
361-
this.lexer.inline(token.text, token.tokens);
358+
token.text = text;
359+
token.tokens = this.lexer.inline(text);
362360
}
363361
return token;
364362
}
@@ -416,17 +414,15 @@ export class Tokenizer {
416414
// header child tokens
417415
l = item.header.length;
418416
for (j = 0; j < l; j++) {
419-
item.header[j].tokens = [];
420-
this.lexer.inline(item.header[j].text, item.header[j].tokens);
417+
item.header[j].tokens = this.lexer.inline(item.header[j].text);
421418
}
422419

423420
// cell child tokens
424421
l = item.rows.length;
425422
for (j = 0; j < l; j++) {
426423
row = item.rows[j];
427424
for (k = 0; k < row.length; k++) {
428-
row[k].tokens = [];
429-
this.lexer.inline(row[k].text, row[k].tokens);
425+
row[k].tokens = this.lexer.inline(row[k].text);
430426
}
431427
}
432428

@@ -438,45 +434,40 @@ export class Tokenizer {
438434
lheading(src) {
439435
const cap = this.rules.block.lheading.exec(src);
440436
if (cap) {
441-
const token = {
437+
return {
442438
type: 'heading',
443439
raw: cap[0],
444440
depth: cap[2].charAt(0) === '=' ? 1 : 2,
445441
text: cap[1],
446-
tokens: []
442+
tokens: this.lexer.inline(cap[1])
447443
};
448-
this.lexer.inline(token.text, token.tokens);
449-
return token;
450444
}
451445
}
452446

453447
paragraph(src) {
454448
const cap = this.rules.block.paragraph.exec(src);
455449
if (cap) {
456-
const token = {
450+
const text = cap[1].charAt(cap[1].length - 1) === '\n'
451+
? cap[1].slice(0, -1)
452+
: cap[1];
453+
return {
457454
type: 'paragraph',
458455
raw: cap[0],
459-
text: cap[1].charAt(cap[1].length - 1) === '\n'
460-
? cap[1].slice(0, -1)
461-
: cap[1],
462-
tokens: []
456+
text,
457+
tokens: this.lexer.inline(text)
463458
};
464-
this.lexer.inline(token.text, token.tokens);
465-
return token;
466459
}
467460
}
468461

469462
text(src) {
470463
const cap = this.rules.block.text.exec(src);
471464
if (cap) {
472-
const token = {
465+
return {
473466
type: 'text',
474467
raw: cap[0],
475468
text: cap[0],
476-
tokens: []
469+
tokens: this.lexer.inline(cap[0])
477470
};
478-
this.lexer.inline(token.text, token.tokens);
479-
return token;
480471
}
481472
}
482473

@@ -645,7 +636,7 @@ export class Tokenizer {
645636
type: 'em',
646637
raw: src.slice(0, lLength + match.index + rLength + 1),
647638
text,
648-
tokens: this.lexer.inlineTokens(text, [])
639+
tokens: this.lexer.inlineTokens(text)
649640
};
650641
}
651642

@@ -655,7 +646,7 @@ export class Tokenizer {
655646
type: 'strong',
656647
raw: src.slice(0, lLength + match.index + rLength + 1),
657648
text,
658-
tokens: this.lexer.inlineTokens(text, [])
649+
tokens: this.lexer.inlineTokens(text)
659650
};
660651
}
661652
}
@@ -696,7 +687,7 @@ export class Tokenizer {
696687
type: 'del',
697688
raw: cap[0],
698689
text: cap[2],
699-
tokens: this.lexer.inlineTokens(cap[2], [])
690+
tokens: this.lexer.inlineTokens(cap[2])
700691
};
701692
}
702693
}

src/defaults.js

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -1,5 +1,6 @@
11
export function getDefaults() {
22
return {
3+
async: false,
34
baseUrl: null,
45
breaks: false,
56
extensions: null,

src/marked.js

Lines changed: 30 additions & 15 deletions
Original file line number · Diff line number · Diff line change
@@ -105,13 +105,7 @@ export function marked(src, opt, callback) {
105105
return;
106106
}
107107

108-
try {
109-
const tokens = Lexer.lex(src, opt);
110-
if (opt.walkTokens) {
111-
marked.walkTokens(tokens, opt.walkTokens);
112-
}
113-
return Parser.parse(tokens, opt);
114-
} catch (e) {
108+
function onError(e) {
115109
e.message += '\nPlease report this to https://github.com/markedjs/marked.';
116110
if (opt.silent) {
117111
return '<p>An error occurred:</p><pre>'
@@ -120,6 +114,23 @@ export function marked(src, opt, callback) {
120114
}
121115
throw e;
122116
}
117+
118+
try {
119+
const tokens = Lexer.lex(src, opt);
120+
if (opt.walkTokens) {
121+
if (opt.async) {
122+
return Promise.all(marked.walkTokens(tokens, opt.walkTokens))
123+
.then(() => {
124+
return Parser.parse(tokens, opt);
125+
})
126+
.catch(onError);
127+
}
128+
marked.walkTokens(tokens, opt.walkTokens);
129+
}
130+
return Parser.parse(tokens, opt);
131+
} catch (e) {
132+
onError(e);
133+
}
123134
}
124135

125136
/**
@@ -236,10 +247,12 @@ marked.use = function(...args) {
236247
if (pack.walkTokens) {
237248
const walkTokens = marked.defaults.walkTokens;
238249
opts.walkTokens = function(token) {
239-
pack.walkTokens.call(this, token);
250+
let values = [];
251+
values.push(pack.walkTokens.call(this, token));
240252
if (walkTokens) {
241-
walkTokens.call(this, token);
253+
values = values.concat(walkTokens.call(this, token));
242254
}
255+
return values;
243256
};
244257
}
245258

@@ -256,35 +269,37 @@ marked.use = function(...args) {
256269
*/
257270

258271
marked.walkTokens = function(tokens, callback) {
272+
let values = [];
259273
for (const token of tokens) {
260-
callback.call(marked, token);
274+
values = values.concat(callback.call(marked, token));
261275
switch (token.type) {
262276
case 'table': {
263277
for (const cell of token.header) {
264-
marked.walkTokens(cell.tokens, callback);
278+
values = values.concat(marked.walkTokens(cell.tokens, callback));
265279
}
266280
for (const row of token.rows) {
267281
for (const cell of row) {
268-
marked.walkTokens(cell.tokens, callback);
282+
values = values.concat(marked.walkTokens(cell.tokens, callback));
269283
}
270284
}
271285
break;
272286
}
273287
case 'list': {
274-
marked.walkTokens(token.items, callback);
288+
values = values.concat(marked.walkTokens(token.items, callback));
275289
break;
276290
}
277291
default: {
278292
if (marked.defaults.extensions && marked.defaults.extensions.childTokens && marked.defaults.extensions.childTokens[token.type]) { // Walk any extensions
279293
marked.defaults.extensions.childTokens[token.type].forEach(function(childTokens) {
280-
marked.walkTokens(token[childTokens], callback);
294+
values = values.concat(marked.walkTokens(token[childTokens], callback));
281295
});
282296
} else if (token.tokens) {
283-
marked.walkTokens(token.tokens, callback);
297+
values = values.concat(marked.walkTokens(token.tokens, callback));
284298
}
285299
}
286300
}
287301
}
302+
return values;
288303
};
289304

290305
/**

0 commit comments

Comments (0)