diff --git a/src/client/providers/completionSource.ts b/src/client/providers/completionSource.ts index 58a0a48ae835..d238b9c5636c 100644 --- a/src/client/providers/completionSource.ts +++ b/src/client/providers/completionSource.ts @@ -117,6 +117,10 @@ export class CompletionSource { const t = new Tokenizer(); const tokens = t.Tokenize(text); const index = tokens.getItemContaining(document.offsetAt(position)); - return index >= 0 && (tokens[index].TokenType === TokenType.String || tokens[index].TokenType === TokenType.Comment); + if (index >= 0) { + const token = tokens.getItemAt(index); + return token.type === TokenType.String || token.type === TokenType.Comment; + } + return false; } } diff --git a/src/test/language/textRangeCollection.test.ts b/src/test/language/textRangeCollection.test.ts index 32522e63c778..53e5ff4dc650 100644 --- a/src/test/language/textRangeCollection.test.ts +++ b/src/test/language/textRangeCollection.test.ts @@ -32,7 +32,7 @@ suite('Language.TextRangeCollection', () => { assert.equal(c.getItemAt(1).start, 4); assert.equal(c.getItemAt(1).length, 2); }); - test('Contains position', async () => { + test('Contains position (simple)', async () => { const items: TextRange[] = []; items.push(new TextRange(2, 1)); items.push(new TextRange(4, 2)); @@ -43,6 +43,37 @@ suite('Language.TextRangeCollection', () => { assert.equal(index, results[i]); } }); + test('Contains position (adjacent)', async () => { + const items: TextRange[] = []; + items.push(new TextRange(2, 1)); + items.push(new TextRange(3, 2)); + const c = new TextRangeCollection(items); + const results = [-1, -1, 0, 1, 1, -1, -1]; + for (let i = 0; i < results.length; i += 1) { + const index = c.getItemContaining(i); + assert.equal(index, results[i]); + } + }); + test('Contains position (out of range)', async () => { + const items: TextRange[] = []; + items.push(new TextRange(2, 1)); + items.push(new TextRange(4, 2)); + const c = new TextRangeCollection(items); + const positions = [-100, -1, 
10, 100]; + for (const p of positions) { + const index = c.getItemContaining(p); + assert.equal(index, -1); + } + }); + test('Contains position (empty)', async () => { + const items: TextRange[] = []; + const c = new TextRangeCollection(items); + const positions = [-2, -1, 0, 1, 2, 3]; + for (const p of positions) { + const index = c.getItemContaining(p); + assert.equal(index, -1); + } + }); test('Item at position', async () => { const items: TextRange[] = []; items.push(new TextRange(2, 1)); diff --git a/src/test/language/tokenizer.test.ts b/src/test/language/tokenizer.test.ts index 7642b88acfaa..cef3b08e5fb1 100644 --- a/src/test/language/tokenizer.test.ts +++ b/src/test/language/tokenizer.test.ts @@ -16,7 +16,7 @@ suite('Language.Tokenizer', () => { assert.equal(tokens.count, 0); assert.equal(tokens.length, 0); }); - test('Strings', async () => { + test('Strings: unclosed', async () => { const t = new Tokenizer(); const tokens = t.Tokenize(' "string" """line1\n#line2"""\t\'un#closed'); assert.equal(tokens.count, 3); @@ -28,6 +28,42 @@ suite('Language.Tokenizer', () => { assert.equal(tokens.getItemAt(i).type, TokenType.String); } }); + test('Strings: block next to regular, double-quoted', async () => { + const t = new Tokenizer(); + const tokens = t.Tokenize('"string""""s2"""'); + assert.equal(tokens.count, 2); + + const ranges = [0, 8, 8, 8]; + for (let i = 0; i < tokens.count; i += 1) { + assert.equal(tokens.getItemAt(i).start, ranges[2 * i]); + assert.equal(tokens.getItemAt(i).length, ranges[2 * i + 1]); + assert.equal(tokens.getItemAt(i).type, TokenType.String); + } + }); + test('Strings: block next to block, double-quoted', async () => { + const t = new Tokenizer(); + const tokens = t.Tokenize('""""""""'); + assert.equal(tokens.count, 2); + + const ranges = [0, 6, 6, 2]; + for (let i = 0; i < tokens.count; i += 1) { + assert.equal(tokens.getItemAt(i).start, ranges[2 * i]); + assert.equal(tokens.getItemAt(i).length, ranges[2 * i + 1]); + 
assert.equal(tokens.getItemAt(i).type, TokenType.String); + } + }); + test('Strings: unclosed sequence of quotes', async () => { + const t = new Tokenizer(); + const tokens = t.Tokenize('"""""'); + assert.equal(tokens.count, 1); + + const ranges = [0, 5]; + for (let i = 0; i < tokens.count; i += 1) { + assert.equal(tokens.getItemAt(i).start, ranges[2 * i]); + assert.equal(tokens.getItemAt(i).length, ranges[2 * i + 1]); + assert.equal(tokens.getItemAt(i).type, TokenType.String); + } + }); test('Comments', async () => { const t = new Tokenizer(); const tokens = t.Tokenize(' #co"""mment1\n\t\n#comm\'ent2 ');