From 5e202eaddfb29a7a3256178dc7149bf152e8557c Mon Sep 17 00:00:00 2001 From: Fernandez Ludovic Date: Fri, 13 May 2016 22:39:09 +0200 Subject: [PATCH 1/3] refactor: naming conventions and styling - `htmlentities` become `character-reference` - `reference` become `xref` - ordering `explicit` after `code-block` - `code-block` respect conventions - better code blocks patterns --- grammars/language-asciidoc.cson | 277 +++++++++++------- .../repositories/blocks/quote-grammar.cson | 6 +- .../inlines/characters-grammar.cson | 8 +- .../inlines/general-block-macro-grammar.cson | 6 +- .../partials/explicit-paragraph-grammar.cson | 4 +- lib/code-block-generator.coffee | 50 +++- spec/blocks/code-block-grammar-spec.coffee | 14 +- spec/blocks/quote-grammar-spec.coffee | 16 +- spec/code-block-generator-spec.coffee | 57 +++- spec/inlines/characters-grammar-spec.coffee | 6 +- ...coffee => language-properties-spec.coffee} | 0 .../block-callout-grammar-spec.coffee | 4 +- .../explicit-paragraph-grammar-spec.coffee | 114 +++---- styles/asciidoc.atom-text-editor.less | 11 +- 14 files changed, 355 insertions(+), 218 deletions(-) rename spec/{grammar-properties-lang-spec.coffee => language-properties-spec.coffee} (100%) diff --git a/grammars/language-asciidoc.cson b/grammars/language-asciidoc.cson index f4a674a..45dfe04 100644 --- a/grammars/language-asciidoc.cson +++ b/grammars/language-asciidoc.cson @@ -227,10 +227,13 @@ repository: patterns: [ { name: "markup.explicit.asciidoc" - match: "^\\[((normal|literal|listing|TIP|NOTE|IMPORTANT|WARNING|CAUTION|partintro|comment|example|sidebar|source|music|latex|graphviz))\\]$" + match: ''' + ^\\[((normal|literal|listing|TIP|NOTE|IMPORTANT|WARNING|CAUTION|partintro|comment|example|sidebar|source|music|latex|graphviz))\\]$(? @@ -86,11 +111,12 @@ module.exports = begin: '^\\s*(`{3,}).*$' beginCaptures: 0: name: 'support.asciidoc' - patterns: [include: '#block-callout'] + patterns: [ + include: '#block-callout' + ] end: '^\\s*\\1\\s*$' endCaptures: 0: name: 'support.asciidoc' - if debug - console.log CSON.stringify codeBlocks + if debug then console.log CSON.stringify codeBlocks codeBlocks diff --git a/spec/blocks/code-block-grammar-spec.coffee b/spec/blocks/code-block-grammar-spec.coffee index c6d3632..abefc90 100644 --- a/spec/blocks/code-block-grammar-spec.coffee +++ b/spec/blocks/code-block-grammar-spec.coffee @@ -29,9 +29,9 @@ describe 'Should tokenizes code block when', -> expect(tokens).toHaveLength 7 # Number of lines expect(tokens[0]).toHaveLength 5 expect(tokens[0][0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][1]).toEqualJson value: 'source', scopes: ['source.asciidoc', 'support.asciidoc', 'constant.asciidoc'] + expect(tokens[0][1]).toEqualJson value: 'source', scopes: ['source.asciidoc', 'support.asciidoc', 'entity.name.function.asciidoc'] expect(tokens[0][2]).toEqualJson value: ',', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][3]).toEqualJson value: 'shell', scopes: ['source.asciidoc', 'support.asciidoc', 'string.asciidoc'] + expect(tokens[0][3]).toEqualJson value: 'shell', scopes: ['source.asciidoc', 'support.asciidoc', 'entity.name.type.asciidoc'] expect(tokens[0][4]).toEqualJson value: ']', scopes: ['source.asciidoc', 'support.asciidoc'] expect(tokens[1]).toHaveLength 1 expect(tokens[1][0]).toEqualJson value: '----', scopes: ['source.asciidoc', 'markup.code.shell.asciidoc', 'support.asciidoc'] @@ -77,13 +77,13 @@ describe 'Should tokenizes code block when', -> expect(tokens).toHaveLength 4 
# Number of lines expect(tokens[0]).toHaveLength 9 expect(tokens[0][0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][1]).toEqualJson value: 'source', scopes: ['source.asciidoc', 'support.asciidoc', 'constant.asciidoc'] + expect(tokens[0][1]).toEqualJson value: 'source', scopes: ['source.asciidoc', 'support.asciidoc', 'entity.name.function.asciidoc'] expect(tokens[0][2]).toEqualJson value: ',', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][3]).toEqualJson value: 'java', scopes: ['source.asciidoc', 'support.asciidoc', 'string.asciidoc'] + expect(tokens[0][3]).toEqualJson value: 'java', scopes: ['source.asciidoc', 'support.asciidoc', 'entity.name.type.asciidoc'] expect(tokens[0][4]).toEqualJson value: ',', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][5]).toEqualJson value: 'subs="', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][6]).toEqualJson value: '{markup-in-source}', scopes: ['source.asciidoc', 'support.asciidoc', 'markup.substitution.attribute-reference.asciidoc'] - expect(tokens[0][7]).toEqualJson value: '"', scopes: ['source.asciidoc', 'support.asciidoc'] + expect(tokens[0][5]).toEqualJson value: 'subs="', scopes: ['source.asciidoc', 'support.asciidoc', 'markup.meta.attribute-list.asciidoc'] + expect(tokens[0][6]).toEqualJson value: '{markup-in-source}', scopes: ['source.asciidoc', 'support.asciidoc', 'markup.meta.attribute-list.asciidoc', 'markup.substitution.attribute-reference.asciidoc'] + expect(tokens[0][7]).toEqualJson value: '"', scopes: ['source.asciidoc', 'support.asciidoc', 'markup.meta.attribute-list.asciidoc'] expect(tokens[0][8]).toEqualJson value: ']', scopes: ['source.asciidoc', 'support.asciidoc'] expect(tokens[1]).toHaveLength 1 expect(tokens[1][0]).toEqualJson value: '----', scopes: ['source.asciidoc', 'markup.code.java.asciidoc', 'support.asciidoc'] diff --git a/spec/blocks/quote-grammar-spec.coffee b/spec/blocks/quote-grammar-spec.coffee index c391cf0..b283ae6 100644 --- a/spec/blocks/quote-grammar-spec.coffee +++ b/spec/blocks/quote-grammar-spec.coffee @@ -26,11 +26,11 @@ describe 'Should tokenizes quote block when', -> expect(tokens).toHaveLength 4 expect(tokens[0]).toHaveLength 7 expect(tokens[0][0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[0][1]).toEqualJson value: 'quote', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.label.asciidoc' ] + expect(tokens[0][1]).toEqualJson value: 'quote', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'entity.name.function.label.asciidoc' ] expect(tokens[0][2]).toEqualJson value: ', ', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[0][3]).toEqualJson value: 'Erwin Schrödinger', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.attribution.asciidoc'] + expect(tokens[0][3]).toEqualJson value: 'Erwin Schrödinger', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'string.unquoted.attribution.asciidoc'] expect(tokens[0][4]).toEqualJson value: ', ', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[0][5]).toEqualJson value: 'Sorry', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.citetitle.asciidoc'] + expect(tokens[0][5]).toEqualJson value: 'Sorry', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 
'string.unquoted.citetitle.asciidoc'] expect(tokens[0][6]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] expect(tokens[1]).toHaveLength 1 expect(tokens[1][0]).toEqualJson value: '____', scopes: ['source.asciidoc', 'markup.italic.quotes.asciidoc'] @@ -43,9 +43,9 @@ describe 'Should tokenizes quote block when', -> {tokens} = grammar.tokenizeLine '[verse, Homer Simpson]\n' expect(tokens).toHaveLength 6 expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[1]).toEqualJson value: 'verse', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.label.asciidoc'] + expect(tokens[1]).toEqualJson value: 'verse', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'entity.name.function.label.asciidoc'] expect(tokens[2]).toEqualJson value: ', ', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[3]).toEqualJson value: 'Homer Simpson', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.attribution.asciidoc'] + expect(tokens[3]).toEqualJson value: 'Homer Simpson', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'string.unquoted.attribution.asciidoc'] expect(tokens[4]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] expect(tokens[5]).toEqualJson value: '\n', scopes: ['source.asciidoc'] @@ -53,11 +53,11 @@ describe 'Should tokenizes quote block when', -> {tokens} = grammar.tokenizeLine '[quote, Erwin Schrödinger, Sorry]\n' expect(tokens).toHaveLength 8 expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[1]).toEqualJson value: 'quote', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.label.asciidoc'] + expect(tokens[1]).toEqualJson value: 'quote', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'entity.name.function.label.asciidoc'] expect(tokens[2]).toEqualJson value: ', ', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[3]).toEqualJson value: 'Erwin Schrödinger', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.attribution.asciidoc'] + expect(tokens[3]).toEqualJson value: 'Erwin Schrödinger', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'string.unquoted.attribution.asciidoc'] expect(tokens[4]).toEqualJson value: ', ', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] - expect(tokens[5]).toEqualJson value: 'Sorry', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'none.quotes.citetitle.asciidoc'] + expect(tokens[5]).toEqualJson value: 'Sorry', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc', 'string.unquoted.citetitle.asciidoc'] expect(tokens[6]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.italic.quotes.attributes.asciidoc'] expect(tokens[7]).toEqualJson value: '\n', scopes: ['source.asciidoc'] diff --git a/spec/code-block-generator-spec.coffee b/spec/code-block-generator-spec.coffee index 2c029b1..40ab5c6 100644 --- a/spec/code-block-generator-spec.coffee +++ b/spec/code-block-generator-spec.coffee @@ -7,35 +7,62 @@ describe 'Code block generator', -> it 'should generate default code block', -> languages = [] codeBlocks = generator.makeAsciidocBlocks(languages) - 
expect(codeBlocks).toHaveLength 2 # Number of blocks + expect(codeBlocks).toHaveLength 3 # Number of blocks expect(codeBlocks[0]).toEqualJson - begin: '^\\[(source)(?:,([^,\\]]*)){0,2}\\]$' + begin: '^\\[(source)(,([^\\]]*))?\\]$' beginCaptures: 0: name: 'support.asciidoc' - 1: name: 'constant.asciidoc' - 2: name: 'string.asciidoc' + 1: name: 'entity.name.function.asciidoc' + 2: name: 'markup.meta.attribute-list.asciidoc' + end: '(?<=----)[\\r\\n]+$' patterns: [ name: 'markup.raw.asciidoc' begin: '^(-{4,})\\s*$' beginCaptures: 0: name: 'support.asciidoc' - patterns: [include: '#block-callout'] + patterns: [ + include: '#block-callout' + ] end: '^\\1*$' endCaptures: 0: name: 'support.asciidoc' ] + + it 'should generate default code block with attributes only', -> + languages = [] + codeBlocks = generator.makeAsciidocBlocks(languages) + expect(codeBlocks).toHaveLength 3 # Number of blocks + expect(codeBlocks[1]).toEqualJson + begin: '^\\[([^\\]]+)\\]$' + beginCaptures: + 0: name: 'support.asciidoc' + 1: name: 'markup.meta.attribute-list.asciidocc' end: '(?<=----)[\\r\\n]+$' + patterns: [ + name: 'markup.raw.asciidoc' + begin: '^(-{4,})\\s*$' + beginCaptures: + 0: name: 'support.asciidoc' + patterns: [ + include: '#block-callout' + ] + end: '^\\1*$' + endCaptures: + 0: name: 'support.asciidoc' + ] it 'should generate listing block', -> languages = [] codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 2 # Number of blocks - expect(codeBlocks[1]).toEqualJson + expect(codeBlocks).toHaveLength 3 # Number of blocks + expect(codeBlocks[2]).toEqualJson name: 'markup.raw.asciidoc' begin: '^(-{4,})\\s*$' beginCaptures: 0: name: 'support.asciidoc' - patterns: [include: '#block-callout'] + patterns: [ + include: '#block-callout' + ] end: '^\\1*$' endCaptures: 0: name: 'support.asciidoc' @@ -45,14 +72,15 @@ describe 'Code block generator', -> pattern: 'javascript|js', type: 'source', code: 'js' ] codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 3 # Number of blocks + expect(codeBlocks).toHaveLength 4 # Number of blocks expect(codeBlocks[0]).toEqualJson begin: '^\\[(source),\\p{Blank}*(?i:(javascript|js))(?:,([^\]]*))?\\]$' beginCaptures: 0: name: 'support.asciidoc' - 1: name: 'constant.asciidoc' - 2: name: 'string.asciidoc' + 1: name: 'entity.name.function.asciidoc' + 2: name: 'entity.name.type.asciidoc' 3: + name: 'markup.meta.attribute-list.asciidoc' patterns: [ include: '#attribute-reference' ] @@ -78,14 +106,15 @@ describe 'Code block generator', -> pattern: 'c(pp|\\+\\+)', type: 'source', code: 'cpp' ] codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 3 # Number of blocks + expect(codeBlocks).toHaveLength 4 # Number of blocks expect(codeBlocks[0]).toEqualJson begin: '^\\[(source),\\p{Blank}*(?i:(c(pp|\\+\\+)))(?:,([^\]]*))?\\]$' beginCaptures: 0: name: 'support.asciidoc' - 1: name: 'constant.asciidoc' - 2: name: 'string.asciidoc' + 1: name: 'entity.name.function.asciidoc' + 2: name: 'entity.name.type.asciidoc' 3: + name: 'markup.meta.attribute-list.asciidoc' patterns: [ include: '#attribute-reference' ] diff --git a/spec/inlines/characters-grammar-spec.coffee b/spec/inlines/characters-grammar-spec.coffee index 0ccf2d1..a6ee0f8 100644 --- a/spec/inlines/characters-grammar-spec.coffee +++ b/spec/inlines/characters-grammar-spec.coffee @@ -20,9 +20,9 @@ describe 'Should tokenizes characters when', -> {tokens} = grammar.tokenizeLine 'Dungeons & Dragons' expect(tokens).toHaveLength 5 
expect(tokens[0]).toEqualJson value: 'Dungeons ', scopes: ['source.asciidoc'] - expect(tokens[1]).toEqualJson value: '&', scopes: ['source.asciidoc', 'markup.htmlentity.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqualJson value: 'amp', scopes: ['source.asciidoc', 'markup.htmlentity.asciidoc'] - expect(tokens[3]).toEqualJson value: ';', scopes: ['source.asciidoc', 'markup.htmlentity.asciidoc', 'support.constant.asciidoc'] + expect(tokens[1]).toEqualJson value: '&', scopes: ['source.asciidoc', 'markup.character-reference.asciidoc', 'constant.character.asciidoc'] + expect(tokens[2]).toEqualJson value: 'amp', scopes: ['source.asciidoc', 'markup.character-reference.asciidoc'] + expect(tokens[3]).toEqualJson value: ';', scopes: ['source.asciidoc', 'markup.character-reference.asciidoc', 'constant.character.asciidoc'] expect(tokens[4]).toEqualJson value: ' Dragons', scopes: ['source.asciidoc'] it 'contains space (invalid context)', -> diff --git a/spec/grammar-properties-lang-spec.coffee b/spec/language-properties-spec.coffee similarity index 100% rename from spec/grammar-properties-lang-spec.coffee rename to spec/language-properties-spec.coffee diff --git a/spec/partials/block-callout-grammar-spec.coffee b/spec/partials/block-callout-grammar-spec.coffee index df1e0e2..d58bbcc 100644 --- a/spec/partials/block-callout-grammar-spec.coffee +++ b/spec/partials/block-callout-grammar-spec.coffee @@ -29,9 +29,9 @@ describe 'Should tokenizes callout in code block when', -> expect(tokens).toHaveLength 8 expect(tokens[0]).toHaveLength 5 expect(tokens[0][0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][1]).toEqualJson value: 'source', scopes: ['source.asciidoc', 'support.asciidoc', 'constant.asciidoc'] + expect(tokens[0][1]).toEqualJson value: 'source', scopes: ['source.asciidoc', 'support.asciidoc', 'entity.name.function.asciidoc'] expect(tokens[0][2]).toEqualJson value: ', ', scopes: ['source.asciidoc', 'support.asciidoc'] - expect(tokens[0][3]).toEqualJson value: 'js', scopes: ['source.asciidoc', 'support.asciidoc', 'string.asciidoc'] + expect(tokens[0][3]).toEqualJson value: 'js', scopes: ['source.asciidoc', 'support.asciidoc', 'entity.name.type.asciidoc'] expect(tokens[0][4]).toEqualJson value: ']', scopes: ['source.asciidoc', 'support.asciidoc'] expect(tokens[1]).toHaveLength 1 expect(tokens[1][0]).toEqualJson value: '----', scopes: ['source.asciidoc', 'markup.code.js.asciidoc', 'support.asciidoc'] diff --git a/spec/partials/explicit-paragraph-grammar-spec.coffee b/spec/partials/explicit-paragraph-grammar-spec.coffee index c067cb4..f6583f8 100644 --- a/spec/partials/explicit-paragraph-grammar-spec.coffee +++ b/spec/partials/explicit-paragraph-grammar-spec.coffee @@ -19,121 +19,121 @@ describe 'Should tokenizes explicit paragraph when', -> it 'use "normal" keyword', -> {tokens} = grammar.tokenizeLine '[normal]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'normal', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'normal', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', 
scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "literal" keyword', -> {tokens} = grammar.tokenizeLine '[literal]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'literal', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'literal', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "listing" keyword', -> {tokens} = grammar.tokenizeLine '[listing]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'listing', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'listing', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "TIP" keyword', -> {tokens} = grammar.tokenizeLine '[TIP]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'TIP', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'TIP', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "NOTE" keyword', -> {tokens} = grammar.tokenizeLine '[NOTE]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'NOTE', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'NOTE', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "IMPORTANT" keyword', -> {tokens} = grammar.tokenizeLine '[IMPORTANT]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'IMPORTANT', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + 
expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'IMPORTANT', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "WARNING" keyword', -> {tokens} = grammar.tokenizeLine '[WARNING]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'WARNING', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'WARNING', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "CAUTION" keyword', -> {tokens} = grammar.tokenizeLine '[CAUTION]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'CAUTION', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'CAUTION', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "partintro" keyword', -> {tokens} = grammar.tokenizeLine '[partintro]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'partintro', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'partintro', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "comment" keyword', -> {tokens} = grammar.tokenizeLine '[comment]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'comment', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'comment', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "example" keyword', -> {tokens} = grammar.tokenizeLine '[example]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: 
'[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'example', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'example', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "WARNING" keyword', -> {tokens} = grammar.tokenizeLine '[WARNING]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'WARNING', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'WARNING', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "sidebar" keyword', -> {tokens} = grammar.tokenizeLine '[sidebar]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'sidebar', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'sidebar', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "source" keyword', -> {tokens} = grammar.tokenizeLine '[source]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'source', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'source', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "music" keyword', -> {tokens} = grammar.tokenizeLine '[music]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'music', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'music', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 
'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "latex" keyword', -> {tokens} = grammar.tokenizeLine '[latex]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'latex', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'latex', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'use "graphviz" keyword', -> {tokens} = grammar.tokenizeLine '[graphviz]' expect(tokens).toHaveLength 3 - expect(tokens[0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[1]).toEqual value: 'graphviz', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[1]).toEqualJson value: 'graphviz', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] it 'simple title with example block', -> @@ -145,12 +145,12 @@ describe 'Should tokenizes explicit paragraph when', -> ''' expect(tokens).toHaveLength 4 expect(tokens[0]).toHaveLength 3 - expect(tokens[0][0]).toEqual value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] - expect(tokens[0][1]).toEqual value: 'example', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'support.constant.asciidoc'] - expect(tokens[0][2]).toEqual value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0][0]).toEqualJson value: '[', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] + expect(tokens[0][1]).toEqualJson value: 'example', scopes: ['source.asciidoc', 'markup.explicit.asciidoc', 'entity.name.function.asciidoc'] + expect(tokens[0][2]).toEqualJson value: ']', scopes: ['source.asciidoc', 'markup.explicit.asciidoc'] expect(tokens[1]).toHaveLength 1 - expect(tokens[1][0]).toEqual value: '====', scopes: ['source.asciidoc', 'markup.block.example.asciidoc'] + expect(tokens[1][0]).toEqualJson value: '====', scopes: ['source.asciidoc', 'markup.block.example.asciidoc'] expect(tokens[2]).toHaveLength 1 - expect(tokens[2][0]).toEqual value: 'foobar', scopes: ['source.asciidoc', 'markup.block.example.asciidoc'] + expect(tokens[2][0]).toEqualJson value: 'foobar', scopes: ['source.asciidoc', 'markup.block.example.asciidoc'] expect(tokens[3]).toHaveLength 1 - expect(tokens[3][0]).toEqual value: '====', scopes: ['source.asciidoc', 'markup.block.example.asciidoc'] + expect(tokens[3][0]).toEqualJson value: '====', scopes: ['source.asciidoc', 'markup.block.example.asciidoc'] diff --git a/styles/asciidoc.atom-text-editor.less b/styles/asciidoc.atom-text-editor.less index c908b97..27afc42 100644 --- a/styles/asciidoc.atom-text-editor.less +++ b/styles/asciidoc.atom-text-editor.less @@ -15,7 +15,7 @@ atom-text-editor::shadow, :host { 
font-style: italic; } - &.htmlentity { + &.character-reference { font-style: italic; color: mix(red, @syntax-text-color, 20%); } @@ -26,9 +26,8 @@ atom-text-editor::shadow, :host { color: mix(green, @syntax-text-color, 20%); } - &.reference { + &.xref { font-style: italic; - color: mix(green, @syntax-text-color, 20%); } &.admonition, @@ -44,18 +43,18 @@ atom-text-editor::shadow, :host { font-weight: bold; // Lighten headers for dark themes. - & when (lightness(@syntax-background-color) < 50%){ + & when (lightness(@syntax-background-color) < 50%) { color: lighten(@syntax-text-color, 20%); } // Darken headers for light themes. - & when (lightness(@syntax-background-color) > 50%){ + & when (lightness(@syntax-background-color) > 50%) { color: darken(@syntax-text-color, 20%); } } &.heading.blocktitle, - &.substitution{ + &.substitution { color: @syntax-text-color-unobtrusive; } &.meta.attribute-list { From 42989ec74130148835dd59253bd82d20677514ed Mon Sep 17 00:00:00 2001 From: Fernandez Ludovic Date: Sun, 15 May 2016 02:40:48 +0200 Subject: [PATCH 2/3] refactor: cancel generic block with attributes only --- grammars/language-asciidoc.cson | 27 ---------------------- lib/code-block-generator.coffee | 20 ---------------- spec/code-block-generator-spec.coffee | 33 ++++----------------------- 3 files changed, 5 insertions(+), 75 deletions(-) diff --git a/grammars/language-asciidoc.cson b/grammars/language-asciidoc.cson index 45dfe04..f1a65ef 100644 --- a/grammars/language-asciidoc.cson +++ b/grammars/language-asciidoc.cson @@ -2679,33 +2679,6 @@ repository: } ] } - { - begin: "^\\[([^\\]]+)\\]$" - beginCaptures: - "0": - name: "support.asciidoc" - "1": - name: "markup.meta.attribute-list.asciidocc" - end: "(?<=----)[\\r\\n]+$" - patterns: [ - { - name: "markup.raw.asciidoc" - begin: "^(-{4,})\\s*$" - beginCaptures: - "0": - name: "support.asciidoc" - patterns: [ - { - include: "#block-callout" - } - ] - end: "^\\1*$" - endCaptures: - "0": - name: "support.asciidoc" - } - ] - } { name: "markup.raw.asciidoc" begin: "^(-{4,})\\s*$" diff --git a/lib/code-block-generator.coffee b/lib/code-block-generator.coffee index 1b913bd..f0cedcc 100644 --- a/lib/code-block-generator.coffee +++ b/lib/code-block-generator.coffee @@ -52,26 +52,6 @@ module.exports = 0: name: 'support.asciidoc' ] - # add generic block with attributes only - codeBlocks.push - begin: '^\\[([^\\]]+)\\]$' - beginCaptures: - 0: name: 'support.asciidoc' - 1: name: 'markup.meta.attribute-list.asciidocc' - end: '(?<=----)[\\r\\n]+$' - patterns: [ - name: 'markup.raw.asciidoc' - begin: '^(-{4,})\\s*$' - beginCaptures: - 0: name: 'support.asciidoc' - patterns: [ - include: '#block-callout' - ] - end: '^\\1*$' - endCaptures: - 0: name: 'support.asciidoc' - ] - # add listing block codeBlocks.push name: 'markup.raw.asciidoc' diff --git a/spec/code-block-generator-spec.coffee b/spec/code-block-generator-spec.coffee index 40ab5c6..9529ac6 100644 --- a/spec/code-block-generator-spec.coffee +++ b/spec/code-block-generator-spec.coffee @@ -7,7 +7,7 @@ describe 'Code block generator', -> it 'should generate default code block', -> languages = [] codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 3 # Number of blocks + expect(codeBlocks).toHaveLength 2 # Number of blocks expect(codeBlocks[0]).toEqualJson begin: '^\\[(source)(,([^\\]]*))?\\]$' beginCaptures: @@ -28,34 +28,11 @@ describe 'Code block generator', -> 0: name: 'support.asciidoc' ] - it 'should generate default code block with attributes only', -> - languages = 
[] - codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 3 # Number of blocks - expect(codeBlocks[1]).toEqualJson - begin: '^\\[([^\\]]+)\\]$' - beginCaptures: - 0: name: 'support.asciidoc' - 1: name: 'markup.meta.attribute-list.asciidocc' - end: '(?<=----)[\\r\\n]+$' - patterns: [ - name: 'markup.raw.asciidoc' - begin: '^(-{4,})\\s*$' - beginCaptures: - 0: name: 'support.asciidoc' - patterns: [ - include: '#block-callout' - ] - end: '^\\1*$' - endCaptures: - 0: name: 'support.asciidoc' - ] - it 'should generate listing block', -> languages = [] codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 3 # Number of blocks - expect(codeBlocks[2]).toEqualJson + expect(codeBlocks).toHaveLength 2 # Number of blocks + expect(codeBlocks[1]).toEqualJson name: 'markup.raw.asciidoc' begin: '^(-{4,})\\s*$' beginCaptures: @@ -72,7 +49,7 @@ describe 'Code block generator', -> pattern: 'javascript|js', type: 'source', code: 'js' ] codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 4 # Number of blocks + expect(codeBlocks).toHaveLength 3 # Number of blocks expect(codeBlocks[0]).toEqualJson begin: '^\\[(source),\\p{Blank}*(?i:(javascript|js))(?:,([^\]]*))?\\]$' beginCaptures: @@ -106,7 +83,7 @@ describe 'Code block generator', -> pattern: 'c(pp|\\+\\+)', type: 'source', code: 'cpp' ] codeBlocks = generator.makeAsciidocBlocks(languages) - expect(codeBlocks).toHaveLength 4 # Number of blocks + expect(codeBlocks).toHaveLength 3 # Number of blocks expect(codeBlocks[0]).toEqualJson begin: '^\\[(source),\\p{Blank}*(?i:(c(pp|\\+\\+)))(?:,([^\]]*))?\\]$' beginCaptures: From 37e1597df95563c7e2a2a579f5d8ae5bda843e4f Mon Sep 17 00:00:00 2001 From: Fernandez Ludovic Date: Sun, 15 May 2016 13:56:42 +0200 Subject: [PATCH 3/3] refactor: ordering patterms --- grammars/language-asciidoc.cson | 359 +++++++++--------- .../partials/explicit-paragraph-grammar.cson | 2 +- lib/code-block-generator.coffee | 8 +- 3 files changed, 183 insertions(+), 186 deletions(-) diff --git a/grammars/language-asciidoc.cson b/grammars/language-asciidoc.cson index f1a65ef..76cddd5 100644 --- a/grammars/language-asciidoc.cson +++ b/grammars/language-asciidoc.cson @@ -227,10 +227,7 @@ repository: patterns: [ { name: "markup.explicit.asciidoc" - match: ''' - ^\\[((normal|literal|listing|TIP|NOTE|IMPORTANT|WARNING|CAUTION|partintro|comment|example|sidebar|source|music|latex|graphviz))\\]$(?
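
Illustrative sketch (not part of the patch series): a minimal CoffeeScript example of how the reworked generic source-block pattern from the first commit behaves. The regex, the capture scopes, and the expected block count are taken from the grammar and specs above; the require path and the console output lines are assumptions added only for the sake of the example.

# Generic [source] begin pattern produced by lib/code-block-generator.coffee.
# Capture 1 is scoped entity.name.function.asciidoc; capture 2 (the comma-prefixed
# attribute tail) is scoped markup.meta.attribute-list.asciidoc.
sourceBlockBegin = /^\[(source)(,([^\]]*))?\]$/

for line in ['[source]', '[source,ruby]', '[source,java,subs="{markup-in-source}"]']
  m = line.match sourceBlockBegin
  # m[3] is the attribute tail without the leading comma; undefined for a bare [source]
  console.log "#{line} -> keyword=#{m[1]}, attributes=#{m[3] ? '(none)'}"

# Driving the generator the same way the specs do (path relative to the package root assumed):
generator = require './lib/code-block-generator'
blocks = generator.makeAsciidocBlocks [pattern: 'javascript|js', type: 'source', code: 'js']
console.log blocks.length  # 3 after the second commit: [source,js], generic [source], then the ---- listing block

As the spec indexes above suggest, ordering is what makes this work: the language-specific patterns are emitted before the generic [source] block, which in turn precedes the bare ---- listing block, so the most specific rule is tried first.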