Skip to content

css-tokenizer : remove commentsAreTokens #779

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/cascade-layer-name-parser/dist/index.cjs
Original file line number Diff line number Diff line change
@@ -1 +1 @@
"use strict";var e=require("@csstools/css-tokenizer"),n=require("@csstools/css-parser-algorithms");class LayerName{parts;constructor(e){this.parts=e}tokens(){return[...this.parts]}slice(n,r){const t=[];for(let n=0;n<this.parts.length;n++)this.parts[n][0]===e.TokenType.Ident&&t.push(n);const a=t.slice(n,r);return new LayerName(this.parts.slice(a[0],a[a.length-1]+1))}concat(n){const r=[e.TokenType.Delim,".",-1,-1,{value:"."}];return new LayerName([...this.parts.filter((n=>n[0]===e.TokenType.Ident||n[0]===e.TokenType.Delim)),r,...n.parts.filter((n=>n[0]===e.TokenType.Ident||n[0]===e.TokenType.Delim))])}segments(){return this.parts.filter((n=>n[0]===e.TokenType.Ident)).map((e=>e[4].value))}name(){return this.parts.filter((n=>n[0]===e.TokenType.Ident||n[0]===e.TokenType.Delim)).map((e=>e[1])).join("")}equal(e){const n=this.segments(),r=e.segments();if(n.length!==r.length)return!1;for(let e=0;e<n.length;e++){if(n[e]!==r[e])return!1}return!0}toString(){return e.stringify(...this.parts)}toJSON(){return{parts:this.parts,segments:this.segments(),name:this.name()}}}function parseFromTokens(r,t){const a=n.parseCommaSeparatedListOfComponentValues(r,{onParseError:null==t?void 0:t.onParseError}),s=(null==t?void 0:t.onParseError)??(()=>{}),o=["6.4.2. Layer Naming and Nesting","Layer name syntax","<layer-name> = <ident> [ '.' <ident> ]*"],i=r[0][2],l=r[r.length-1][3],p=[];for(let r=0;r<a.length;r++){const t=a[r];for(let r=0;r<t.length;r++){const a=t[r];if(!n.isTokenNode(a)&&!n.isCommentNode(a)&&!n.isWhitespaceNode(a))return s(new e.ParseError(`Invalid cascade layer name. Invalid layer name part "${a.toString()}"`,i,l,o)),[]}const m=t.flatMap((e=>e.tokens()));let c,T=!1,y=!1;for(let n=0;n<m.length;n++){const r=m[n];if(r[0]!==e.TokenType.Comment&&r[0]!==e.TokenType.Whitespace&&r[0]!==e.TokenType.Ident&&(r[0]!==e.TokenType.Delim||"."!==r[4].value))return s(new e.ParseError(`Invalid cascade layer name. 
Invalid character "${r[1]}"`,i,l,o)),[];if(!T&&r[0]===e.TokenType.Delim)return s(new e.ParseError("Invalid cascade layer name. Layer names can not start with a dot.",i,l,o)),[];if(T){if(r[0]===e.TokenType.Whitespace){y=!0;continue}if(y&&r[0]===e.TokenType.Comment)continue;if(y)return s(new e.ParseError("Invalid cascade layer name. Encountered unexpected whitespace between layer name parts.",i,l,o)),[];if(c[0]===e.TokenType.Ident&&r[0]===e.TokenType.Ident)return s(new e.ParseError("Invalid cascade layer name. Layer name parts must be separated by dots.",i,l,o)),[];if(c[0]===e.TokenType.Delim&&r[0]===e.TokenType.Delim)return s(new e.ParseError("Invalid cascade layer name. Layer name parts must not be empty.",i,l,o)),[]}r[0]===e.TokenType.Ident&&(T=!0),r[0]!==e.TokenType.Ident&&r[0]!==e.TokenType.Delim||(c=r)}if(!c)return s(new e.ParseError("Invalid cascade layer name. Empty layer name.",i,l,o)),[];if(c[0]===e.TokenType.Delim)return s(new e.ParseError("Invalid cascade layer name. Layer name must not end with a dot.",i,l,o)),[];p.push(new LayerName(m))}return p}exports.LayerName=LayerName,exports.addLayerToModel=function addLayerToModel(e,n){return n.forEach((n=>{const r=n.segments();e:for(let t=0;t<r.length;t++){const r=n.slice(0,t+1),a=r.segments();let s=-1,o=0;for(let n=0;n<e.length;n++){const r=e[n].segments();let t=0;n:for(let e=0;e<r.length;e++){const n=r[e],s=a[e];if(s===n&&e+1===a.length)continue e;if(s!==n){if(s!==n)break n}else t++}t>=o&&(s=n,o=t)}-1===s?e.push(r):e.splice(s+1,0,r)}})),e},exports.parse=function parse(n,r){const t=e.tokenizer({css:n},{commentsAreTokens:!0,onParseError:null==r?void 0:r.onParseError}),a=[];for(;!t.endOfFile();)a.push(t.nextToken());return a.push(t.nextToken()),parseFromTokens(a,r)},exports.parseFromTokens=parseFromTokens;
"use strict";var e=require("@csstools/css-tokenizer"),n=require("@csstools/css-parser-algorithms");class LayerName{parts;constructor(e){this.parts=e}tokens(){return[...this.parts]}slice(n,r){const t=[];for(let n=0;n<this.parts.length;n++)this.parts[n][0]===e.TokenType.Ident&&t.push(n);const a=t.slice(n,r);return new LayerName(this.parts.slice(a[0],a[a.length-1]+1))}concat(n){const r=[e.TokenType.Delim,".",-1,-1,{value:"."}];return new LayerName([...this.parts.filter((n=>n[0]===e.TokenType.Ident||n[0]===e.TokenType.Delim)),r,...n.parts.filter((n=>n[0]===e.TokenType.Ident||n[0]===e.TokenType.Delim))])}segments(){return this.parts.filter((n=>n[0]===e.TokenType.Ident)).map((e=>e[4].value))}name(){return this.parts.filter((n=>n[0]===e.TokenType.Ident||n[0]===e.TokenType.Delim)).map((e=>e[1])).join("")}equal(e){const n=this.segments(),r=e.segments();if(n.length!==r.length)return!1;for(let e=0;e<n.length;e++){if(n[e]!==r[e])return!1}return!0}toString(){return e.stringify(...this.parts)}toJSON(){return{parts:this.parts,segments:this.segments(),name:this.name()}}}function parseFromTokens(r,t){const a=n.parseCommaSeparatedListOfComponentValues(r,{onParseError:null==t?void 0:t.onParseError}),s=(null==t?void 0:t.onParseError)??(()=>{}),o=["6.4.2. Layer Naming and Nesting","Layer name syntax","<layer-name> = <ident> [ '.' <ident> ]*"],i=r[0][2],l=r[r.length-1][3],p=[];for(let r=0;r<a.length;r++){const t=a[r];for(let r=0;r<t.length;r++){const a=t[r];if(!n.isTokenNode(a)&&!n.isCommentNode(a)&&!n.isWhitespaceNode(a))return s(new e.ParseError(`Invalid cascade layer name. Invalid layer name part "${a.toString()}"`,i,l,o)),[]}const m=t.flatMap((e=>e.tokens()));let c,T=!1,y=!1;for(let n=0;n<m.length;n++){const r=m[n];if(r[0]!==e.TokenType.Comment&&r[0]!==e.TokenType.Whitespace&&r[0]!==e.TokenType.Ident&&(r[0]!==e.TokenType.Delim||"."!==r[4].value))return s(new e.ParseError(`Invalid cascade layer name. 
Invalid character "${r[1]}"`,i,l,o)),[];if(!T&&r[0]===e.TokenType.Delim)return s(new e.ParseError("Invalid cascade layer name. Layer names can not start with a dot.",i,l,o)),[];if(T){if(r[0]===e.TokenType.Whitespace){y=!0;continue}if(y&&r[0]===e.TokenType.Comment)continue;if(y)return s(new e.ParseError("Invalid cascade layer name. Encountered unexpected whitespace between layer name parts.",i,l,o)),[];if(c[0]===e.TokenType.Ident&&r[0]===e.TokenType.Ident)return s(new e.ParseError("Invalid cascade layer name. Layer name parts must be separated by dots.",i,l,o)),[];if(c[0]===e.TokenType.Delim&&r[0]===e.TokenType.Delim)return s(new e.ParseError("Invalid cascade layer name. Layer name parts must not be empty.",i,l,o)),[]}r[0]===e.TokenType.Ident&&(T=!0),r[0]!==e.TokenType.Ident&&r[0]!==e.TokenType.Delim||(c=r)}if(!c)return s(new e.ParseError("Invalid cascade layer name. Empty layer name.",i,l,o)),[];if(c[0]===e.TokenType.Delim)return s(new e.ParseError("Invalid cascade layer name. Layer name must not end with a dot.",i,l,o)),[];p.push(new LayerName(m))}return p}exports.LayerName=LayerName,exports.addLayerToModel=function addLayerToModel(e,n){return n.forEach((n=>{const r=n.segments();e:for(let t=0;t<r.length;t++){const r=n.slice(0,t+1),a=r.segments();let s=-1,o=0;for(let n=0;n<e.length;n++){const r=e[n].segments();let t=0;n:for(let e=0;e<r.length;e++){const n=r[e],s=a[e];if(s===n&&e+1===a.length)continue e;if(s!==n){if(s!==n)break n}else t++}t>=o&&(s=n,o=t)}-1===s?e.push(r):e.splice(s+1,0,r)}})),e},exports.parse=function parse(n,r){const t=e.tokenizer({css:n},{onParseError:null==r?void 0:r.onParseError}),a=[];for(;!t.endOfFile();)a.push(t.nextToken());return a.push(t.nextToken()),parseFromTokens(a,r)},exports.parseFromTokens=parseFromTokens;
2 changes: 1 addition & 1 deletion packages/cascade-layer-name-parser/dist/index.mjs
Original file line number Diff line number Diff line change
@@ -1 +1 @@
// NOTE(review): generated, minified ESM dist bundle (packages/cascade-layer-name-parser/dist/index.mjs),
// shown here as the OLD side of the diff. Its `parse` still passes `commentsAreTokens:!0` to the
// tokenizer. Do not hand-edit; regenerate from src.
import{TokenType as e,stringify as t,ParseError as n,tokenizer as r}from"@csstools/css-tokenizer";import{parseCommaSeparatedListOfComponentValues as a,isTokenNode as s,isCommentNode as o,isWhitespaceNode as i}from"@csstools/css-parser-algorithms";class LayerName{parts;constructor(e){this.parts=e}tokens(){return[...this.parts]}slice(t,n){const r=[];for(let t=0;t<this.parts.length;t++)this.parts[t][0]===e.Ident&&r.push(t);const a=r.slice(t,n);return new LayerName(this.parts.slice(a[0],a[a.length-1]+1))}concat(t){const n=[e.Delim,".",-1,-1,{value:"."}];return new LayerName([...this.parts.filter((t=>t[0]===e.Ident||t[0]===e.Delim)),n,...t.parts.filter((t=>t[0]===e.Ident||t[0]===e.Delim))])}segments(){return this.parts.filter((t=>t[0]===e.Ident)).map((e=>e[4].value))}name(){return this.parts.filter((t=>t[0]===e.Ident||t[0]===e.Delim)).map((e=>e[1])).join("")}equal(e){const t=this.segments(),n=e.segments();if(t.length!==n.length)return!1;for(let e=0;e<t.length;e++){if(t[e]!==n[e])return!1}return!0}toString(){return t(...this.parts)}toJSON(){return{parts:this.parts,segments:this.segments(),name:this.name()}}}function addLayerToModel(e,t){return t.forEach((t=>{const n=t.segments();e:for(let r=0;r<n.length;r++){const n=t.slice(0,r+1),a=n.segments();let s=-1,o=0;for(let t=0;t<e.length;t++){const n=e[t].segments();let r=0;t:for(let e=0;e<n.length;e++){const t=n[e],s=a[e];if(s===t&&e+1===a.length)continue e;if(s!==t){if(s!==t)break t}else r++}r>=o&&(s=t,o=r)}-1===s?e.push(n):e.splice(s+1,0,n)}})),e}function parseFromTokens(t,r){const l=a(t,{onParseError:null==r?void 0:r.onParseError}),m=(null==r?void 0:r.onParseError)??(()=>{}),c=["6.4.2. Layer Naming and Nesting","Layer name syntax","<layer-name> = <ident> [ '.' <ident> ]*"],d=t[0][2],u=t[t.length-1][3],p=[];for(let t=0;t<l.length;t++){const r=l[t];for(let e=0;e<r.length;e++){const t=r[e];if(!s(t)&&!o(t)&&!i(t))return m(new n(`Invalid cascade layer name. 
Invalid layer name part "${t.toString()}"`,d,u,c)),[]}const a=r.flatMap((e=>e.tokens()));let h,f=!1,y=!1;for(let t=0;t<a.length;t++){const r=a[t];if(r[0]!==e.Comment&&r[0]!==e.Whitespace&&r[0]!==e.Ident&&(r[0]!==e.Delim||"."!==r[4].value))return m(new n(`Invalid cascade layer name. Invalid character "${r[1]}"`,d,u,c)),[];if(!f&&r[0]===e.Delim)return m(new n("Invalid cascade layer name. Layer names can not start with a dot.",d,u,c)),[];if(f){if(r[0]===e.Whitespace){y=!0;continue}if(y&&r[0]===e.Comment)continue;if(y)return m(new n("Invalid cascade layer name. Encountered unexpected whitespace between layer name parts.",d,u,c)),[];if(h[0]===e.Ident&&r[0]===e.Ident)return m(new n("Invalid cascade layer name. Layer name parts must be separated by dots.",d,u,c)),[];if(h[0]===e.Delim&&r[0]===e.Delim)return m(new n("Invalid cascade layer name. Layer name parts must not be empty.",d,u,c)),[]}r[0]===e.Ident&&(f=!0),r[0]!==e.Ident&&r[0]!==e.Delim||(h=r)}if(!h)return m(new n("Invalid cascade layer name. Empty layer name.",d,u,c)),[];if(h[0]===e.Delim)return m(new n("Invalid cascade layer name. Layer name must not end with a dot.",d,u,c)),[];p.push(new LayerName(a))}return p}function parse(e,t){const n=r({css:e},{commentsAreTokens:!0,onParseError:null==t?void 0:t.onParseError}),a=[];for(;!n.endOfFile();)a.push(n.nextToken());return a.push(n.nextToken()),parseFromTokens(a,t)}export{LayerName,addLayerToModel,parse,parseFromTokens};
// NOTE(review): generated, minified ESM dist bundle (packages/cascade-layer-name-parser/dist/index.mjs),
// shown here as the NEW side of the diff. `parse` now calls the tokenizer without the removed
// `commentsAreTokens` option. Do not hand-edit; regenerate from src.
import{TokenType as e,stringify as t,ParseError as n,tokenizer as r}from"@csstools/css-tokenizer";import{parseCommaSeparatedListOfComponentValues as a,isTokenNode as s,isCommentNode as i,isWhitespaceNode as l}from"@csstools/css-parser-algorithms";class LayerName{parts;constructor(e){this.parts=e}tokens(){return[...this.parts]}slice(t,n){const r=[];for(let t=0;t<this.parts.length;t++)this.parts[t][0]===e.Ident&&r.push(t);const a=r.slice(t,n);return new LayerName(this.parts.slice(a[0],a[a.length-1]+1))}concat(t){const n=[e.Delim,".",-1,-1,{value:"."}];return new LayerName([...this.parts.filter((t=>t[0]===e.Ident||t[0]===e.Delim)),n,...t.parts.filter((t=>t[0]===e.Ident||t[0]===e.Delim))])}segments(){return this.parts.filter((t=>t[0]===e.Ident)).map((e=>e[4].value))}name(){return this.parts.filter((t=>t[0]===e.Ident||t[0]===e.Delim)).map((e=>e[1])).join("")}equal(e){const t=this.segments(),n=e.segments();if(t.length!==n.length)return!1;for(let e=0;e<t.length;e++){if(t[e]!==n[e])return!1}return!0}toString(){return t(...this.parts)}toJSON(){return{parts:this.parts,segments:this.segments(),name:this.name()}}}function addLayerToModel(e,t){return t.forEach((t=>{const n=t.segments();e:for(let r=0;r<n.length;r++){const n=t.slice(0,r+1),a=n.segments();let s=-1,i=0;for(let t=0;t<e.length;t++){const n=e[t].segments();let r=0;t:for(let e=0;e<n.length;e++){const t=n[e],s=a[e];if(s===t&&e+1===a.length)continue e;if(s!==t){if(s!==t)break t}else r++}r>=i&&(s=t,i=r)}-1===s?e.push(n):e.splice(s+1,0,n)}})),e}function parseFromTokens(t,r){const o=a(t,{onParseError:null==r?void 0:r.onParseError}),m=(null==r?void 0:r.onParseError)??(()=>{}),c=["6.4.2. Layer Naming and Nesting","Layer name syntax","<layer-name> = <ident> [ '.' <ident> ]*"],d=t[0][2],u=t[t.length-1][3],p=[];for(let t=0;t<o.length;t++){const r=o[t];for(let e=0;e<r.length;e++){const t=r[e];if(!s(t)&&!i(t)&&!l(t))return m(new n(`Invalid cascade layer name. 
Invalid layer name part "${t.toString()}"`,d,u,c)),[]}const a=r.flatMap((e=>e.tokens()));let h,f=!1,y=!1;for(let t=0;t<a.length;t++){const r=a[t];if(r[0]!==e.Comment&&r[0]!==e.Whitespace&&r[0]!==e.Ident&&(r[0]!==e.Delim||"."!==r[4].value))return m(new n(`Invalid cascade layer name. Invalid character "${r[1]}"`,d,u,c)),[];if(!f&&r[0]===e.Delim)return m(new n("Invalid cascade layer name. Layer names can not start with a dot.",d,u,c)),[];if(f){if(r[0]===e.Whitespace){y=!0;continue}if(y&&r[0]===e.Comment)continue;if(y)return m(new n("Invalid cascade layer name. Encountered unexpected whitespace between layer name parts.",d,u,c)),[];if(h[0]===e.Ident&&r[0]===e.Ident)return m(new n("Invalid cascade layer name. Layer name parts must be separated by dots.",d,u,c)),[];if(h[0]===e.Delim&&r[0]===e.Delim)return m(new n("Invalid cascade layer name. Layer name parts must not be empty.",d,u,c)),[]}r[0]===e.Ident&&(f=!0),r[0]!==e.Ident&&r[0]!==e.Delim||(h=r)}if(!h)return m(new n("Invalid cascade layer name. Empty layer name.",d,u,c)),[];if(h[0]===e.Delim)return m(new n("Invalid cascade layer name. Layer name must not end with a dot.",d,u,c)),[];p.push(new LayerName(a))}return p}function parse(e,t){const n=r({css:e},{onParseError:null==t?void 0:t.onParseError}),a=[];for(;!n.endOfFile();)a.push(n.nextToken());return a.push(n.nextToken()),parseFromTokens(a,t)}export{LayerName,addLayerToModel,parse,parseFromTokens};
1 change: 0 additions & 1 deletion packages/cascade-layer-name-parser/src/parser/parse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,6 @@ export function parseFromTokens(tokens: Array<CSSToken>, options?: Options) {

export function parse(source: string, options?: Options) {
const t = tokenizer({ css: source }, {
commentsAreTokens: true,
onParseError: options?.onParseError,
});

Expand Down
2 changes: 1 addition & 1 deletion packages/cascade-layer-name-parser/test/util/run-test.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ export function runTest(source, testPath, assertEqual, expectSuccess = true) {
} else {
if (expectSuccess) {
if (err) {
throw new Error(JSON.stringify(err));
throw err;
}
} else {
assertEqual(!!err, true);
Expand Down
2 changes: 1 addition & 1 deletion packages/css-parser-algorithms/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ const tokens = [];

const options = {
onParseError: ((err) => {
throw new Error(JSON.stringify(err));
throw err;
}),
};

Expand Down
1 change: 0 additions & 1 deletion packages/css-parser-algorithms/test/util/run-test.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ export function runTest(source, testPath, assertEqual, expectParseError = false)
throw new Error(`Unable to parse "${source}"`);
};
const t = tokenizer({ css: source }, {
commentsAreTokens: true,
onParseError: onParseError,
});

Expand Down
1 change: 1 addition & 0 deletions packages/css-tokenizer/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

- Simplify `Reader` interface (breaking)
- Change the `ParseError` interface, this is now a subclass of `Error` (breaking)
- Remove the `commentsAreTokens` option as `true` was the only desirable value (breaking)
- Improve performance

### 1.0.0 (November 14, 2022)
Expand Down
26 changes: 0 additions & 26 deletions packages/css-tokenizer/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,36 +42,10 @@ while (true) {

```ts
{
commentsAreTokens?: false,
onParseError?: (error: ParseError) => void
}
```

#### `commentsAreTokens`

Following the CSS specification, comments are never returned by the tokenizer.
For many tools, however, it is desirable to be able to convert tokens back to a string.

```js
import { tokenizer, TokenType } from '@csstools/css-tokenizer';

const t = tokenizer({
css: `/* a comment */`,
}, { commentsAreTokens: true });

while (true) {
const token = t.nextToken();
if (token[0] === TokenType.EOF) {
break;
}

console.log(token);
}
```

logs: `['comment', '/* a comment */', <start>, <end>, undefined]`


#### `onParseError`

The tokenizer is forgiving and won't stop when a parse error is encountered.
Expand Down
2 changes: 1 addition & 1 deletion packages/css-tokenizer/dist/index.cjs

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion packages/css-tokenizer/dist/index.mjs

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion packages/css-tokenizer/dist/tokenizer.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ interface Stringer {
export declare function tokenizer(input: {
css: Stringer;
}, options?: {
commentsAreTokens?: boolean;
onParseError?: (error: ParseError) => void;
}): {
nextToken: () => CSSToken | undefined;
Expand Down
10 changes: 2 additions & 8 deletions packages/css-tokenizer/src/tokenizer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ interface Stringer {
valueOf(): string
}

export function tokenizer(input: { css: Stringer }, options?: { commentsAreTokens?: boolean, onParseError?: (error: ParseError) => void }) {
export function tokenizer(input: { css: Stringer }, options?: { onParseError?: (error: ParseError) => void }) {
const css = input.css.valueOf();

const reader = new Reader(css);
Expand All @@ -39,13 +39,7 @@ export function tokenizer(input: { css: Stringer }, options?: { commentsAreToken
reader.representationEnd = -1;

if (checkIfTwoCodePointsStartAComment(ctx, reader)) {
if (options?.commentsAreTokens) {
return consumeComment(ctx, reader);
} else {
consumeComment(ctx, reader);
reader.representationStart = reader.cursor;
reader.representationEnd = -1;
}
return consumeComment(ctx, reader);
}

const peeked = reader.codePointSource[reader.cursor];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ eachTest((name, css) => {
css: css,
},
{
commentsAreTokens: true,
onParseError: (err) => {
// We only expect something like bootstrap to tokenize without parser errors.
throw new Error(JSON.stringify(err));
Expand Down
1 change: 0 additions & 1 deletion packages/css-tokenizer/test/community/token-types.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@ ${openPropsSource}
css: source,
},
{
commentsAreTokens: true,
onParseError: () => {
// noop
},
Expand Down
1 change: 1 addition & 0 deletions packages/css-tokenizer/test/complex/at-media-params.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ import { collectTokens } from '../util/collect-tokens.mjs';
'screen/* a comment */(--custom-mq)',
[
['ident-token', 'screen', 0, 5, { value: 'screen' }],
['comment', '/* a comment */', 6, 20, undefined],
['(-token', '(', 21, 21, undefined],
['ident-token', '--custom-mq', 22, 32, { value: '--custom-mq' }],
[')-token', ')', 33, 33, undefined],
Expand Down
2 changes: 1 addition & 1 deletion packages/css-tokenizer/test/css-tokenizer-tests/test.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import { tokenizer } from '@csstools/css-tokenizer';
for (const testCase in testCorpus) {
const t = tokenizer({
css: testCorpus[testCase].css,
}, {commentsAreTokens: true});
});

assert.deepEqual(
collectTokens(t).map((x) => toUniversal(x)).slice(0, -1),
Expand Down
1 change: 0 additions & 1 deletion packages/css-tokenizer/test/inspect.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@ while (true) {
css: source,
},
{
commentsAreTokens: true,
onParseError: () => {
// noop
},
Expand Down
10 changes: 5 additions & 5 deletions packages/css-tokenizer/test/token/comment.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import { collectTokens } from '../util/collect-tokens.mjs';
{
const t = tokenizer({
css: 'a /* a comment */ b',
}, { commentsAreTokens : true });
});

assert.deepEqual(
collectTokens(t),
Expand All @@ -23,7 +23,7 @@ import { collectTokens } from '../util/collect-tokens.mjs';
{
const t = tokenizer({
css: 'a/* a comment */b',
}, { commentsAreTokens: true });
});

assert.deepEqual(
collectTokens(t),
Expand All @@ -39,7 +39,7 @@ import { collectTokens } from '../util/collect-tokens.mjs';
{
const t = tokenizer({
css: 'a /* a comment',
}, { commentsAreTokens: true });
});

assert.deepEqual(
collectTokens(t),
Expand All @@ -56,7 +56,7 @@ import { collectTokens } from '../util/collect-tokens.mjs';
const t = tokenizer({
css: `a /* a comment
*/`,
}, { commentsAreTokens: true });
});

assert.deepEqual(
collectTokens(t),
Expand All @@ -72,7 +72,7 @@ import { collectTokens } from '../util/collect-tokens.mjs';
{
const t = tokenizer({
css: 'a /* a comment \\*/ b',
}, { commentsAreTokens: true });
});

assert.deepEqual(
collectTokens(t),
Expand Down
2 changes: 1 addition & 1 deletion packages/media-query-list-parser/dist/index.cjs

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion packages/media-query-list-parser/dist/index.mjs

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@ export function parseCustomMediaFromTokens(tokens: Array<CSSToken>, options?: Op

export function parseCustomMedia(source: string, options?: Options): CustomMedia | false {
const t = tokenizer({ css: source }, {
commentsAreTokens: true,
onParseError: options?.onParseError,
});

Expand Down
1 change: 0 additions & 1 deletion packages/media-query-list-parser/src/parser/parse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ export function parseFromTokens(tokens: Array<CSSToken>, options?: Options) {

export function parse(source: string, options?: Options) {
const t = tokenizer({ css: source }, {
commentsAreTokens: true,
onParseError: options?.onParseError,
});

Expand Down
Loading