@@ -5,9 +5,9 @@
  */
 import assert from "assert"
 import * as lodash from "lodash"
-import {ErrorCode, HasLocation, Namespace, ParseError, Token, VAttribute} from "../ast"
-import {debug} from "../common/debug"
-import {Tokenizer, TokenizerState, TokenType} from "./tokenizer"
+import { ErrorCode, HasLocation, Namespace, ParseError, Token, VAttribute } from "../ast"
+import { debug } from "../common/debug"
+import { Tokenizer, TokenizerState, TokenType } from "./tokenizer"

 const DUMMY_PARENT: any = Object.freeze({})

@@ -177,7 +177,7 @@ export class IntermediateTokenizer {
                 token = {
                     type: "Text",
                     range: [start.range[0], end.range[1]],
-                    loc: {start: start.loc.start, end: end.loc.end},
+                    loc: { start: start.loc.start, end: end.loc.end },
                     value,
                 }
             }
@@ -253,7 +253,7 @@ export class IntermediateTokenizer {
         this.currentToken = {
             type: "Text",
             range: [token.range[0], token.range[1]],
-            loc: {start: token.loc.start, end: token.loc.end},
+            loc: { start: token.loc.start, end: token.loc.end },
             value: token.value,
         }

@@ -321,7 +321,7 @@ export class IntermediateTokenizer {
         this.currentToken = {
             type: "EndTag",
             range: [token.range[0], token.range[1]],
-            loc: {start: token.loc.start, end: token.loc.end},
+            loc: { start: token.loc.start, end: token.loc.end },
             name: token.value,
         }

@@ -350,13 +350,13 @@ export class IntermediateTokenizer {
         this.attribute = {
             type: "VAttribute",
             range: [token.range[0], token.range[1]],
-            loc: {start: token.loc.start, end: token.loc.end},
+            loc: { start: token.loc.start, end: token.loc.end },
             parent: DUMMY_PARENT,
             directive: false,
             key: {
                 type: "VIdentifier",
                 range: [token.range[0], token.range[1]],
-                loc: {start: token.loc.start, end: token.loc.end},
+                loc: { start: token.loc.start, end: token.loc.end },
                 parent: DUMMY_PARENT,
                 name: token.value,
                 rawName: this.text.slice(token.range[0], token.range[1]),
@@ -385,7 +385,7 @@ export class IntermediateTokenizer {
             this.attribute.value = {
                 type: "VLiteral",
                 range: [token.range[0], token.range[1]],
-                loc: {start: token.loc.start, end: token.loc.end},
+                loc: { start: token.loc.start, end: token.loc.end },
                 parent: this.attribute,
                 value: token.value,
             }
@@ -473,7 +473,7 @@ export class IntermediateTokenizer {
         this.currentToken = {
             type: "StartTag",
             range: [token.range[0], token.range[1]],
-            loc: {start: token.loc.start, end: token.loc.end},
+            loc: { start: token.loc.start, end: token.loc.end },
             name: token.value,
             rawName: this.text.slice(token.range[0] + 1, token.range[1]),
             selfClosing: false,
@@ -548,7 +548,7 @@ export class IntermediateTokenizer {
         this.currentToken = {
             type: "Mustache",
             range: [start.range[0], token.range[1]],
-            loc: {start: start.loc.start, end: token.loc.end},
+            loc: { start: start.loc.start, end: token.loc.end },
             value,
             startToken: start,
             endToken: token,
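Every hunk above touches an object literal with the same location-carrying shape: a `range` pair of character offsets plus a `loc` with `start`/`end` positions. A minimal TypeScript sketch of that shape, using simplified local interfaces as stand-ins rather than the actual `HasLocation`/`Token` types exported from `../ast`:

    // Illustrative stand-ins, not the real exports of "../ast".
    interface Position {
        line: number
        column: number
    }

    interface LocationRange {
        start: Position
        end: Position
    }

    interface TextToken {
        type: "Text"
        range: [number, number]
        loc: LocationRange
        value: string
    }

    // Built with the spaced-brace style this diff adopts.
    const example: TextToken = {
        type: "Text",
        range: [0, 5],
        loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 5 } },
        value: "hello",
    }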
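The change itself is purely cosmetic: a space is added after `{` and before `}` in import specifiers and object literals. If a lint rule enforces this style, it would typically be ESLint's core `object-curly-spacing` rule with the `"always"` option; the configuration below is an illustrative assumption, not the lint setup actually used by this repository.

    // eslint.config.js (flat config), illustrative sketch only.
    // "object-curly-spacing" with "always" requires a space inside the braces
    // of object literals, destructuring patterns, and import/export specifiers.
    export default [
        {
            rules: {
                "object-curly-spacing": ["error", "always"],
            },
        },
    ]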