Skip to content

Commit f03e832

Browse files
authored
feat: support sub-attributes after value paths. (#99)
Allow comparison operations on sub-attributes following value paths. e.g. `emails[type eq "work"].value eq "[email protected]"` The tokenizer regex was updated to allow for a trailing dot (.) after a closing square bracket (]) when searching for "Bracket" tokens. `readValFilter` was then updated to look for any dot after the closing bracket and a following "Word" token. When this happens an implicit `and` op is returned. One issue with this approach is that any `and` ops before or after this syntax will result in nested `and` ops instead of a single op with more than two filters. Check the last test added to `parse.test.ts` for an example. However, I couldn't figure out a neat way to flatten that use case without a much more disruptive changeset. fixes: #96
1 parent 3b2fa7f commit f03e832

File tree

4 files changed

+60
-6
lines changed

4 files changed

+60
-6
lines changed

package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
"filter"
1616
],
1717
"scripts": {
18-
"watch": "mocha -r ts-node/register 'test/**/*.ts' 'src/**/*.ts' -w --watch-extensions ts",
18+
"watch": "mocha -r ts-node/register 'test/**/*.ts' 'src/**/*.ts' -w --watch-extensions ts",
1919
"build": "tsc",
2020
"test": "npm run build && mocha 'lib/test/**/*.test.js'"
2121
},

src/parser.ts

+14-5
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ export type Token = {
1010
export function tokenizer(f: string): Token[] {
1111
const ret: Token[] = [];
1212
let rest = f;
13-
const patterns = /^(?:(\s+)|(-?\d+(?:\.\d+)?(?:[eE][-+]?\d+)?)|("(?:[^"]|\\.|\n)*")|([[\]()])|(\w[-\w\._:\/%]*))/;
13+
const patterns = /^(?:(\s+)|(-?\d+(?:\.\d+)?(?:[eE][-+]?\d+)?)|("(?:[^"]|\\.|\n)*")|([[()]|]\.?)|(\w[-\w._:\/%]*))/;
1414
let n;
1515
while ((n = patterns.exec(rest))) {
1616
if (n[1] || n[0].length === 0) {
@@ -127,17 +127,26 @@ function readValFilter(left: Token, list: TokenList): Filter {
127127
const t = list.shift();
128128
const op = t.literal.toLowerCase();
129129
if (cops.has(op)) {
130-
var compValue = parseCompValue(list);
130+
const compValue = parseCompValue(list);
131131
return { op, attrPath, compValue } as Compare;
132132
} else if (sops.has(op)) {
133133
return { op, attrPath } as Suffix;
134-
} else if (op == "[") {
134+
} else if (op === "[") {
135135
const valFilter = parseFilter(list);
136136
const close = list.shift();
137-
if (close.literal !== "]") {
137+
if (close.literal[0] !== "]") {
138138
throw new Error(`Unexpected token ${close.literal} expected ']'`);
139139
}
140-
return { op: "[]", attrPath, valFilter } as ValuePath;
140+
const valPath: ValuePath = { op: "[]", attrPath, valFilter };
141+
142+
if (close.literal[1] !== "." || list.peek().type !== "Word") {
143+
return valPath
144+
}
145+
146+
// convert a sub-attribute after a value-path to an 'and' op
147+
const next = list.shift()
148+
next.literal = `${attrPath}.${next.literal}`
149+
return { op: 'and', filters: [valPath, readValFilter(next, list)] }
141150
} else {
142151
throw new Error(
143152
`Unexpected token ${attrPath} ${t.literal} as valFilter operator`

test/parse.test.ts

+24
Original file line numberDiff line numberDiff line change
@@ -229,6 +229,30 @@ describe('parse', () => {
229229
`userType eq "Employee" and emails[type eq "work" and value co "@example.com"]`,
230230
and(eq("userType", "Employee"), v("emails", and(eq("type", "work"), op("co", "value", "@example.com"))))
231231
);
232+
test(
233+
`emails[type eq "work"] and emails.value eq "[email protected]"`,
234+
and(
235+
v("emails", eq("type", "work")),
236+
eq("emails.value", "[email protected]"),
237+
)
238+
);
239+
test(
240+
`emails[type eq "work"].value eq "[email protected]"`,
241+
and(
242+
v("emails", eq("type", "work")),
243+
eq("emails.value", "[email protected]"),
244+
)
245+
);
246+
test(
247+
`emails[type eq "work"].value eq "[email protected]" and name eq "foo"`,
248+
and(
249+
and(
250+
v("emails", eq("type", "work")),
251+
eq("emails.value", "[email protected]"),
252+
),
253+
eq("name", "foo"),
254+
)
255+
);
232256
test(
233257
`emails[type eq "work" and value co "@example.com"] or ims[type eq "xmpp" and value co "@foo.com"]`,
234258
or(

test/tokenizer.test.ts

+21
Original file line numberDiff line numberDiff line change
@@ -6,15 +6,18 @@ const assert = chai.assert;
66

77
describe("tokenizer", () => {
88
const tok = (literal: string, type: string) => ({ literal, type } as Token);
9+
910
it("eot", () => {
1011
assert.deepEqual(tokenizer(""), [EOT]);
1112
});
13+
1214
it("false", () => {
1315
assert.deepEqual(tokenizer("false"), [
1416
{ literal: "false", type: "Word" },
1517
EOT
1618
]);
1719
});
20+
1821
it("userName is AttrPath", () => {
1922
assert.deepEqual(tokenizer("userName"), [
2023
{ literal: "userName", type: "Word" },
@@ -28,4 +31,22 @@ describe("tokenizer", () => {
2831
tokenizer("userName eq -12")
2932
);
3033
});
34+
35+
it("sub-attribute after ValPath", () => {
36+
assert.deepEqual(
37+
tokenizer('emails[type eq "work"].value eq "[email protected]"'),
38+
[
39+
tok("emails", "Word"),
40+
tok("[", "Bracket"),
41+
tok("type", "Word"),
42+
tok("eq", "Word"),
43+
tok("\"work\"", "Quoted"),
44+
tok("].", "Bracket"),
45+
tok("value", "Word"),
46+
tok("eq", "Word"),
47+
tok("\"[email protected]\"", "Quoted"),
48+
EOT,
49+
]
50+
)
51+
})
3152
});

0 commit comments

Comments
 (0)