@@ -3,36 +3,44 @@ import {merge} from '../util/core'
 
 const cached = {}
 
-function walkFetchEmbed({step = 0, embedTokens, compile, fetch}, cb) {
-  const token = embedTokens[step]
+function walkFetchEmbed({embedTokens, compile, fetch}, cb) {
+  let token
+  let step = 0
+  let count = 1
 
-  if (!token) {
+  if (!embedTokens.length) {
     return cb({})
   }
 
-  const next = text => {
-    let embedToken
-    if (text) {
-      if (token.embed.type === 'markdown') {
-        embedToken = compile.lexer(text)
-      } else if (token.embed.type === 'code') {
-        embedToken = compile.lexer(
-          '```' +
-          token.embed.lang +
-          '\n' +
-          text.replace(/`/g, '@DOCSIFY_QM@') +
-          '\n```\n'
-        )
+  while ((token = embedTokens[step++])) {
+    const next = (function (token) {
+      return text => {
+        let embedToken
+        if (text) {
+          if (token.embed.type === 'markdown') {
+            embedToken = compile.lexer(text)
+          } else if (token.embed.type === 'code') {
+            embedToken = compile.lexer(
+              '```' +
+              token.embed.lang +
+              '\n' +
+              text.replace(/`/g, '@DOCSIFY_QM@') +
+              '\n```\n'
+            )
+          }
+        }
+        cb({token, embedToken})
+        if (++count >= step) {
+          cb({})
+        }
       }
-    }
-    cb({token, embedToken})
-    walkFetchEmbed({step: ++step, compile, embedTokens, fetch}, cb)
-  }
+    })(token)
 
-  if (process.env.SSR) {
-    fetch(token.embed.url).then(next)
-  } else {
-    get(token.embed.url).then(next)
+    if (process.env.SSR) {
+      fetch(token.embed.url).then(next)
+    } else {
+      get(token.embed.url).then(next)
+    }
   }
 }
 
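For context on the change: the old walkFetchEmbed fetched one embed token per recursive call, while the new version dispatches every fetch from the while loop and uses the count/step comparison to emit a final empty cb({}) once the last response has arrived. Below is a minimal, self-contained sketch of that pattern; fetchAll and fetchOne are hypothetical names chosen for illustration, not identifiers from the repo.

// Illustrative sketch, not code from this repo: the same "dispatch everything,
// count the callbacks" pattern the new walkFetchEmbed uses.
function fetchAll(urls, fetchOne, cb) {
  let url
  let step = 0
  let count = 1

  if (!urls.length) {
    return cb({})
  }

  while ((url = urls[step++])) {
    // Capture the current url in a closure, mirroring the IIFE around `next`
    // in the diff, so each async callback sees its own value.
    const next = (function (url) {
      return result => {
        cb({url, result})
        // After the loop, step === urls.length + 1, so `++count` reaches it
        // only when every request has resolved; then the final empty callback fires.
        if (++count >= step) {
          cb({})
        }
      }
    })(url)

    fetchOne(url).then(next)
  }
}

// Usage: requests run in parallel instead of the old one-at-a-time recursion,
// and the empty final callback marks "all embeds fetched".
fetchAll(
  ['a.md', 'b.md'],
  url => Promise.resolve('contents of ' + url),
  ({url, result}) => {
    if (url) {
      console.log(url, '->', result)
    } else {
      console.log('done')
    }
  }
)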