Skip to content

Commit

Permalink
Fix exception reported in #447 (#536)
Browse files Browse the repository at this point in the history
* Basic tokenizer

* Fixed property names

* Tests, round I

* Tests, round II

* tokenizer test

* Remove temporary change

* Fix merge issue

* Merge conflict

* Merge conflict

* Completion test

* Fix last line

* Fix javascript math

* Make test await for results

* Add license headers

* Rename definitions to types

* License headers

* Fix typo in completion details (typo)

* Fix hover test

* Russian translations

* Update to better translation

* Fix typo

*  #70 How to get all parameter info when filling in a function param list

* Fix #70 How to get all parameter info when filling in a function param list

* Clean up

* Clean imports

* CR feedback

* Trim whitespace for test stability

* More tests

* Better handle no-parameters documentation

* Better handle ellipsis and Python3

* Basic services

* Install check

* Output installer messages

* Warn default Mac OS interpreter

* Remove test change

* Add tests

* PR feedback

* CR feedback

* Mock process instead

* Fix Brew detection

* Update test

* Elevated module install

* Fix path check

* Add check suppression option & suppress for VE by default

* Fix most linter tests

* Merge conflict

* Per-user install

* Handle VE/Conda

* Fix tests

* Remove double service

* #447 Linter throws errors

* Better test names
  • Loading branch information
Mikhail Arkhipov authored Jan 9, 2018
1 parent 0623f19 commit 7f2c239
Show file tree
Hide file tree
Showing 3 changed files with 74 additions and 3 deletions.
6 changes: 5 additions & 1 deletion src/client/providers/completionSource.ts
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,10 @@ export class CompletionSource {
const t = new Tokenizer();
const tokens = t.Tokenize(text);
const index = tokens.getItemContaining(document.offsetAt(position));
return index >= 0 && (tokens[index].TokenType === TokenType.String || tokens[index].TokenType === TokenType.Comment);
if (index >= 0) {
const token = tokens.getItemAt(index);
return token.type === TokenType.String || token.type === TokenType.Comment;
}
return false;
}
}
33 changes: 32 additions & 1 deletion src/test/language/textRangeCollection.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ suite('Language.TextRangeCollection', () => {
assert.equal(c.getItemAt(1).start, 4);
assert.equal(c.getItemAt(1).length, 2);
});
test('Contains position', async () => {
test('Contains position (simple)', async () => {
const items: TextRange[] = [];
items.push(new TextRange(2, 1));
items.push(new TextRange(4, 2));
Expand All @@ -43,6 +43,37 @@ suite('Language.TextRangeCollection', () => {
assert.equal(index, results[i]);
}
});
test('Contains position (adjoint)', async () => {
    // Two ranges that touch at offset 3: [2,3) and [3,5).
    const ranges: TextRange[] = [new TextRange(2, 1), new TextRange(3, 2)];
    const collection = new TextRangeCollection(ranges);
    // Expected owning-range index for each position 0..6; -1 means "outside".
    const expected = [-1, -1, 0, 1, 1, -1, -1];
    expected.forEach((expectedIndex, position) => {
        assert.equal(collection.getItemContaining(position), expectedIndex);
    });
});
test('Contains position (out of range)', async () => {
    // Positions far before and after the stored ranges must resolve to -1.
    const ranges: TextRange[] = [new TextRange(2, 1), new TextRange(4, 2)];
    const collection = new TextRangeCollection(ranges);
    for (const position of [-100, -1, 10, 100]) {
        assert.equal(collection.getItemContaining(position), -1);
    }
});
test('Contains position (empty)', async () => {
    // An empty collection contains no position, negative or otherwise.
    const empty: TextRange[] = [];
    const collection = new TextRangeCollection(empty);
    for (const position of [-2, -1, 0, 1, 2, 3]) {
        assert.equal(collection.getItemContaining(position), -1);
    }
});
test('Item at position', async () => {
const items: TextRange[] = [];
items.push(new TextRange(2, 1));
Expand Down
38 changes: 37 additions & 1 deletion src/test/language/tokenizer.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ suite('Language.Tokenizer', () => {
assert.equal(tokens.count, 0);
assert.equal(tokens.length, 0);
});
test('Strings', async () => {
test('Strings: unclosed', async () => {
const t = new Tokenizer();
const tokens = t.Tokenize(' "string" """line1\n#line2"""\t\'un#closed');
assert.equal(tokens.count, 3);
Expand All @@ -28,6 +28,42 @@ suite('Language.Tokenizer', () => {
assert.equal(tokens.getItemAt(i).type, TokenType.String);
}
});
test('Strings: block next to regular, double-quoted', async () => {
    // A plain "..." string immediately followed by a """...""" block string
    // must tokenize as two separate string tokens.
    const tokens = new Tokenizer().Tokenize('"string""""s2"""');
    assert.equal(tokens.count, 2);

    // Expected (start, length) pairs, one per token.
    const expected: [number, number][] = [[0, 8], [8, 8]];
    expected.forEach(([start, length], i) => {
        const token = tokens.getItemAt(i);
        assert.equal(token.start, start);
        assert.equal(token.length, length);
        assert.equal(token.type, TokenType.String);
    });
});
test('Strings: block next to block, double-quoted', async () => {
    // Eight consecutive double quotes: a closed """""" block followed by
    // an (unterminated) "" pair — two string tokens in total.
    const tokens = new Tokenizer().Tokenize('""""""""');
    assert.equal(tokens.count, 2);

    // Expected (start, length) pairs, one per token.
    const expected: [number, number][] = [[0, 6], [6, 2]];
    expected.forEach(([start, length], i) => {
        const token = tokens.getItemAt(i);
        assert.equal(token.start, start);
        assert.equal(token.length, length);
        assert.equal(token.type, TokenType.String);
    });
});
test('Strings: unclosed sequence of quotes', async () => {
    // Five quotes form a single unterminated string token spanning all input
    // (this is the #447 crash scenario the fix guards against).
    const tokens = new Tokenizer().Tokenize('"""""');
    assert.equal(tokens.count, 1);

    const token = tokens.getItemAt(0);
    assert.equal(token.start, 0);
    assert.equal(token.length, 5);
    assert.equal(token.type, TokenType.String);
});
test('Comments', async () => {
const t = new Tokenizer();
const tokens = t.Tokenize(' #co"""mment1\n\t\n#comm\'ent2 ');
Expand Down

0 comments on commit 7f2c239

Please sign in to comment.