diff --git a/grammars/html.cson b/grammars/html.cson
index 729b3ba..cd07784 100644
--- a/grammars/html.cson
+++ b/grammars/html.cson
@@ -356,15 +356,16 @@
]
}
{
- 'begin': '(</?)([a-zA-Z0-9:-]+)'
+ # Opening tag (optionally self-closing)
+ 'begin': '(<)([a-zA-Z0-9:-]+)'
'beginCaptures':
'1':
'name': 'punctuation.definition.tag.begin.html'
'2':
'name': 'entity.name.tag.other.html'
- 'end': '(>)'
+ 'end': '/?>'
'endCaptures':
- '1':
+ '0':
'name': 'punctuation.definition.tag.end.html'
'name': 'meta.tag.other.html'
'patterns': [
@@ -373,6 +374,20 @@
}
]
}
+ {
+ # Closing tag, which doesn't allow attributes
+ 'begin': '(</)([a-zA-Z0-9:-]+)'
+ 'beginCaptures':
+ '1':
+ 'name': 'punctuation.definition.tag.begin.html'
+ '2':
+ 'name': 'entity.name.tag.other.html'
+ 'end': '>'
+ 'endCaptures':
+ '0':
+ 'name': 'punctuation.definition.tag.end.html'
+ 'name': 'meta.tag.other.html'
+ }
{
'include': '#text-entities'
}
diff --git a/spec/html-spec.coffee b/spec/html-spec.coffee
index 807ec30..620457a 100644
--- a/spec/html-spec.coffee
+++ b/spec/html-spec.coffee
@@ -616,6 +616,32 @@ describe 'HTML grammar', ->
expect(tokens[1]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.end.html']

+ it 'tokenizes unrecognized self-closing tags', ->
+ {tokens} = grammar.tokenizeLine '<foo/>'
+ expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.begin.html']
+ expect(tokens[1]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
+ expect(tokens[2]).toEqual value: '/>', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.end.html']
+
+ it 'tokenizes attributes in opening tags of unrecognized tag names', ->
+ {tokens} = grammar.tokenizeLine '<foo class="test">'
+ expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.begin.html']
+ expect(tokens[1]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
+ expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.other.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
+ expect(tokens[8]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.end.html']
+
+ it 'tokenizes the closing tag of an unrecognized tag name', ->
+ {tokens} = grammar.tokenizeLine '</foo>'
+ expect(tokens[0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.begin.html']
+ expect(tokens[1]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
+ expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.end.html']
+
+ it 'does not tokenize attributes in closing tags of unrecognized tag names', ->
+ {tokens} = grammar.tokenizeLine '</foo class="test">'
+ expect(tokens[0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.begin.html']
+ expect(tokens[1]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
+ expect(tokens[2]).toEqual value: ' class="test"', scopes: ['text.html.basic', 'meta.tag.other.html']
+ expect(tokens[3]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.end.html']
+
it 'tolerates colons in other tag names', ->
{tokens} = grammar.tokenizeLine '<foo:bar>'
expect(tokens[1]).toEqual value: 'foo:bar', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
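
For reference, the two grammar rules as they read once this patch is applied, assembled from the hunks above (the attribute patterns included by the opening-tag rule are elided; only lines shown in the diff are reproduced):

    # Opening tag (optionally self-closing). Attribute patterns are included
    # below, and the '0' end capture scopes both '>' and '/>' as
    # punctuation.definition.tag.end.html.
    {
      'begin': '(<)([a-zA-Z0-9:-]+)'
      'beginCaptures':
        '1':
          'name': 'punctuation.definition.tag.begin.html'
        '2':
          'name': 'entity.name.tag.other.html'
      'end': '/?>'
      'endCaptures':
        '0':
          'name': 'punctuation.definition.tag.end.html'
      'name': 'meta.tag.other.html'
      'patterns': [
        # attribute patterns, unchanged by this patch
      ]
    }
    # Closing tag, which doesn't allow attributes. There is intentionally no
    # 'patterns' key, so anything between the tag name and '>' keeps only the
    # enclosing meta.tag.other.html scope, as the new closing-tag specs assert.
    {
      'begin': '(</)([a-zA-Z0-9:-]+)'
      'beginCaptures':
        '1':
          'name': 'punctuation.definition.tag.begin.html'
        '2':
          'name': 'entity.name.tag.other.html'
      'end': '>'
      'endCaptures':
        '0':
          'name': 'punctuation.definition.tag.end.html'
      'name': 'meta.tag.other.html'
    }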