Skip to content

Commit 7f2c239

Browse files
author
Mikhail Arkhipov
authored
Fix exception reported in #447 (#536)
* Basic tokenizer * Fixed property names * Tests, round I * Tests, round II * tokenizer test * Remove temporary change * Fix merge issue * Merge conflict * Merge conflict * Completion test * Fix last line * Fix javascript math * Make test await for results * Add license headers * Rename definitions to types * License headers * Fix typo in completion details (typo) * Fix hover test * Russian translations * Update to better translation * Fix typo * #70 How to get all parameter info when filling in a function param list * Fix #70 How to get all parameter info when filling in a function param list * Clean up * Clean imports * CR feedback * Trim whitespace for test stability * More tests * Better handle no-parameters documentation * Better handle ellipsis and Python3 * Basic services * Install check * Output installer messages * Warn default Mac OS interpreter * Remove test change * Add tests * PR feedback * CR feedback * Mock process instead * Fix Brew detection * Update test * Elevated module install * Fix path check * Add check suppression option & suppress for VE by default * Fix most linter tests * Merge conflict * Per-user install * Handle VE/Conda * Fix tests * Remove double service * #447 Linter throws errors * Better test names
1 parent 0623f19 commit 7f2c239

File tree

3 files changed

+74
-3
lines changed

3 files changed

+74
-3
lines changed

src/client/providers/completionSource.ts

+5-1
Original file line numberDiff line numberDiff line change
@@ -117,6 +117,10 @@ export class CompletionSource {
117117
const t = new Tokenizer();
118118
const tokens = t.Tokenize(text);
119119
const index = tokens.getItemContaining(document.offsetAt(position));
120-
return index >= 0 && (tokens[index].TokenType === TokenType.String || tokens[index].TokenType === TokenType.Comment);
120+
if (index >= 0) {
121+
const token = tokens.getItemAt(index);
122+
return token.type === TokenType.String || token.type === TokenType.Comment;
123+
}
124+
return false;
121125
}
122126
}

src/test/language/textRangeCollection.test.ts

+32-1
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ suite('Language.TextRangeCollection', () => {
3232
assert.equal(c.getItemAt(1).start, 4);
3333
assert.equal(c.getItemAt(1).length, 2);
3434
});
35-
test('Contains position', async () => {
35+
test('Contains position (simple)', async () => {
3636
const items: TextRange[] = [];
3737
items.push(new TextRange(2, 1));
3838
items.push(new TextRange(4, 2));
@@ -43,6 +43,37 @@ suite('Language.TextRangeCollection', () => {
4343
assert.equal(index, results[i]);
4444
}
4545
});
46+
test('Contains position (adjoint)', async () => {
47+
const items: TextRange[] = [];
48+
items.push(new TextRange(2, 1));
49+
items.push(new TextRange(3, 2));
50+
const c = new TextRangeCollection(items);
51+
const results = [-1, -1, 0, 1, 1, -1, -1];
52+
for (let i = 0; i < results.length; i += 1) {
53+
const index = c.getItemContaining(i);
54+
assert.equal(index, results[i]);
55+
}
56+
});
57+
test('Contains position (out of range)', async () => {
58+
const items: TextRange[] = [];
59+
items.push(new TextRange(2, 1));
60+
items.push(new TextRange(4, 2));
61+
const c = new TextRangeCollection(items);
62+
const positions = [-100, -1, 10, 100];
63+
for (const p of positions) {
64+
const index = c.getItemContaining(p);
65+
assert.equal(index, -1);
66+
}
67+
});
68+
test('Contains position (empty)', async () => {
69+
const items: TextRange[] = [];
70+
const c = new TextRangeCollection(items);
71+
const positions = [-2, -1, 0, 1, 2, 3];
72+
for (const p of positions) {
73+
const index = c.getItemContaining(p);
74+
assert.equal(index, -1);
75+
}
76+
});
4677
test('Item at position', async () => {
4778
const items: TextRange[] = [];
4879
items.push(new TextRange(2, 1));

src/test/language/tokenizer.test.ts

+37-1
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ suite('Language.Tokenizer', () => {
1616
assert.equal(tokens.count, 0);
1717
assert.equal(tokens.length, 0);
1818
});
19-
test('Strings', async () => {
19+
test('Strings: unclosed', async () => {
2020
const t = new Tokenizer();
2121
const tokens = t.Tokenize(' "string" """line1\n#line2"""\t\'un#closed');
2222
assert.equal(tokens.count, 3);
@@ -28,6 +28,42 @@ suite('Language.Tokenizer', () => {
2828
assert.equal(tokens.getItemAt(i).type, TokenType.String);
2929
}
3030
});
31+
test('Strings: block next to regular, double-quoted', async () => {
32+
const t = new Tokenizer();
33+
const tokens = t.Tokenize('"string""""s2"""');
34+
assert.equal(tokens.count, 2);
35+
36+
const ranges = [0, 8, 8, 8];
37+
for (let i = 0; i < tokens.count; i += 1) {
38+
assert.equal(tokens.getItemAt(i).start, ranges[2 * i]);
39+
assert.equal(tokens.getItemAt(i).length, ranges[2 * i + 1]);
40+
assert.equal(tokens.getItemAt(i).type, TokenType.String);
41+
}
42+
});
43+
test('Strings: block next to block, double-quoted', async () => {
44+
const t = new Tokenizer();
45+
const tokens = t.Tokenize('""""""""');
46+
assert.equal(tokens.count, 2);
47+
48+
const ranges = [0, 6, 6, 2];
49+
for (let i = 0; i < tokens.count; i += 1) {
50+
assert.equal(tokens.getItemAt(i).start, ranges[2 * i]);
51+
assert.equal(tokens.getItemAt(i).length, ranges[2 * i + 1]);
52+
assert.equal(tokens.getItemAt(i).type, TokenType.String);
53+
}
54+
});
55+
test('Strings: unclosed sequence of quotes', async () => {
56+
const t = new Tokenizer();
57+
const tokens = t.Tokenize('"""""');
58+
assert.equal(tokens.count, 1);
59+
60+
const ranges = [0, 5];
61+
for (let i = 0; i < tokens.count; i += 1) {
62+
assert.equal(tokens.getItemAt(i).start, ranges[2 * i]);
63+
assert.equal(tokens.getItemAt(i).length, ranges[2 * i + 1]);
64+
assert.equal(tokens.getItemAt(i).type, TokenType.String);
65+
}
66+
});
3167
test('Comments', async () => {
3268
const t = new Tokenizer();
3369
const tokens = t.Tokenize(' #co"""mment1\n\t\n#comm\'ent2 ');

0 commit comments

Comments
 (0)