@@ -112,6 +112,16 @@ function escape(str: string) {
  return str.replace(/[.+*?^${}()[\]|/\\]/g, "\\$&");
}

+/**
+ * Format error so it's easier to debug.
+ */
+function errorMessage(message: string, originalPath: string | undefined) {
+  if (originalPath) {
+    return `${message}: ${originalPath}; visit ${DEBUG_URL} for more info`;
+  }
+  return `${message}; visit ${DEBUG_URL} for more info`;
+}
+
/**
 * Tokenize input string.
 */
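For context, a minimal sketch of the two message shapes the helper above produces; the `DEBUG_URL` value here is a placeholder standing in for the module's existing constant, and the inputs are illustrative only:

```ts
// Standalone sketch, not library code: DEBUG_URL is a placeholder value.
const DEBUG_URL = "https://example.com/debug";

function errorMessage(message: string, originalPath: string | undefined) {
  if (originalPath) {
    return `${message}: ${originalPath}; visit ${DEBUG_URL} for more info`;
  }
  return `${message}; visit ${DEBUG_URL} for more info`;
}

console.log(errorMessage("Missing parameter name at index 3", "/:/foo"));
// Missing parameter name at index 3: /:/foo; visit https://example.com/debug for more info

console.log(errorMessage("Missing parameter name at index 3", undefined));
// Missing parameter name at index 3; visit https://example.com/debug for more info
```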
@@ -145,12 +155,16 @@ function* lexer(str: string): Generator<LexToken, LexToken> {
      }

      if (pos) {
-        throw new TypeError(`Unterminated quote at ${pos}: ${DEBUG_URL}`);
+        throw new TypeError(
+          errorMessage(`Unterminated quote at index ${pos}`, str),
+        );
      }
    }

    if (!value) {
-      throw new TypeError(`Missing parameter name at ${i}: ${DEBUG_URL}`);
+      throw new TypeError(
+        errorMessage(`Missing parameter name at index ${i}`, str),
+      );
    }

    return value;
@@ -180,12 +194,15 @@ function* lexer(str: string): Generator<LexToken, LexToken> {

class Iter {
  private _peek?: LexToken;
+  private _tokens: Generator<LexToken, LexToken>;

-  constructor(private tokens: Generator<LexToken, LexToken>) {}
+  constructor(private originalPath: string) {
+    this._tokens = lexer(originalPath);
+  }

  peek(): LexToken {
    if (!this._peek) {
-      const next = this.tokens.next();
+      const next = this._tokens.next();
      this._peek = next.value;
    }
    return this._peek;
@@ -203,7 +220,10 @@ class Iter {
    if (value !== undefined) return value;
    const { type: nextType, index } = this.peek();
    throw new TypeError(
-      `Unexpected ${nextType} at ${index}, expected ${type}: ${DEBUG_URL}`,
+      errorMessage(
+        `Unexpected ${nextType} at index ${index}, expected ${type}`,
+        this.originalPath,
+      ),
    );
  }

@@ -268,15 +288,18 @@ export type Token = Text | Parameter | Wildcard | Group;
 * Tokenized path instance.
 */
export class TokenData {
-  constructor(public readonly tokens: Token[]) {}
+  constructor(
+    public readonly tokens: Token[],
+    public readonly originalPath?: string,
+  ) {}
}

/**
 * Parse a string for the raw tokens.
 */
export function parse(str: string, options: ParseOptions = {}): TokenData {
  const { encodePath = NOOP_VALUE } = options;
-  const it = new Iter(lexer(str));
+  const it = new Iter(str);

  function consume(endType: TokenType): Token[] {
    const tokens: Token[] = [];
@@ -318,7 +341,7 @@ export function parse(str: string, options: ParseOptions = {}): TokenData {
  }

  const tokens = consume("END");
-  return new TokenData(tokens);
+  return new TokenData(tokens, str);
}

/**
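Taken together with the `TokenData` change above, `parse` now records the string it was given, so later stages can echo it back in errors. A rough sketch of the observable difference for callers, assuming the package is consumed as `path-to-regexp` and using the property names from this diff:

```ts
// Sketch only: shows the newly exposed originalPath on the parse result.
import { parse } from "path-to-regexp";

const data = parse("/users/:id");
console.log(data.tokens.length);  // parsed tokens, as before
console.log(data.originalPath);   // "/users/:id" — available after this change
```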
@@ -496,12 +519,8 @@ export function pathToRegexp(
    trailing = true,
  } = options;
  const keys: Keys = [];
-  const sources: string[] = [];
  const flags = sensitive ? "" : "i";
-
-  for (const seq of flat(path, options)) {
-    sources.push(toRegExp(seq, delimiter, keys));
-  }
+  const sources = Array.from(toRegExps(path, delimiter, keys, options));

  let pattern = `^(?:${sources.join("|")})`;
  if (trailing) pattern += `(?:${escape(delimiter)}$)?`;
@@ -511,35 +530,39 @@ export function pathToRegexp(
  return { regexp, keys };
}

-/**
- * Flattened token set.
- */
-type Flattened = Text | Parameter | Wildcard;
-
/**
 * Path or array of paths to normalize.
 */
-function* flat(
+function* toRegExps(
  path: Path | Path[],
+  delimiter: string,
+  keys: Keys,
  options: ParseOptions,
-): Generator<Flattened[]> {
+): Generator<string> {
  if (Array.isArray(path)) {
-    for (const p of path) yield* flat(p, options);
+    for (const p of path) yield* toRegExps(p, delimiter, keys, options);
    return;
  }

  const data = path instanceof TokenData ? path : parse(path, options);
-  yield* flatten(data.tokens, 0, []);
+  for (const tokens of flatten(data.tokens, 0, [])) {
+    yield toRegExp(tokens, delimiter, keys, data.originalPath);
+  }
}

+/**
+ * Flattened token set.
+ */
+type FlatToken = Text | Parameter | Wildcard;
+
/**
 * Generate a flat list of sequence tokens from the given tokens.
 */
function* flatten(
  tokens: Token[],
  index: number,
-  init: Flattened[],
-): Generator<Flattened[]> {
+  init: FlatToken[],
+): Generator<FlatToken[]> {
  if (index === tokens.length) {
    return yield init;
  }
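The shape of the refactor in the hunk above, reduced to a generic sketch with illustrative names only (not library code): the loop that pushed regexp sources into an array becomes a generator yielding finished source strings, which the caller collects with `Array.from`:

```ts
// Generic sketch of the loop-to-generator refactor; toPattern stands in for toRegExp.
function toPattern(path: string): string {
  return `^${path.replace(/:[^/]+/g, "[^/]+")}$`;
}

function* toPatterns(paths: string[]): Generator<string> {
  for (const path of paths) yield toPattern(path);
}

const sources = Array.from(toPatterns(["/users/:id", "/posts/:slug"]));
// ["^/users/[^/]+$", "^/posts/[^/]+$"]
const regexp = new RegExp(`(?:${sources.join("|")})`, "i");
```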
@@ -560,7 +583,12 @@ function* flatten(
/**
 * Transform a flat sequence of tokens into a regular expression.
 */
-function toRegExp(tokens: Flattened[], delimiter: string, keys: Keys) {
+function toRegExp(
+  tokens: FlatToken[],
+  delimiter: string,
+  keys: Keys,
+  originalPath: string | undefined,
+) {
  let result = "";
  let backtrack = "";
  let isSafeSegmentParam = true;
@@ -575,7 +603,9 @@ function toRegExp(tokens: Flattened[], delimiter: string, keys: Keys) {

    if (token.type === "param" || token.type === "wildcard") {
      if (!isSafeSegmentParam && !backtrack) {
-        throw new TypeError(`Missing text after "${token.name}": ${DEBUG_URL}`);
+        throw new TypeError(
+          errorMessage(`Missing text after "${token.name}"`, originalPath),
+        );
      }

      if (token.type === "param") {
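End to end, the point of the change is that thrown errors now name the offending path. A sketch of what a caller might see, assuming the usual `pathToRegexp` export; the exact index and wording depend on the input and on `DEBUG_URL`:

```ts
// Sketch of the intended effect on callers; message text is illustrative.
import { pathToRegexp } from "path-to-regexp";

try {
  pathToRegexp("/users/:");
} catch (error) {
  // Before: "Missing parameter name at 8: <DEBUG_URL>"
  // After:  "Missing parameter name at index 8: /users/:; visit <DEBUG_URL> for more info"
  console.error((error as Error).message);
}
```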