Closed: pchaigno closed this pull request 7 years ago.
I don't have permission to push changes to your fork, so here's a diff:
diff --git a/grammars/coffeescript.cson b/grammars/coffeescript.cson
index 7d0d5af..6ec1338 100644
--- a/grammars/coffeescript.cson
+++ b/grammars/coffeescript.cson
@@ -103,10 +103,10 @@
       '0':
         'name': 'punctuation.definition.string.end.coffee'
     'name': 'string.quoted.script.coffee'
+    'contentName': 'source.embedded.js'
     'patterns': [
       {
-        'match': '(\\\\)(x[0-9A-Fa-f]{2}|[0-2][0-7]{0,2}|3[0-6][0-7]|37[0-7]?|[4-7][0-7]?|.)'
-        'name': 'constant.character.escape.coffee'
+        'include': 'source.js'
       }
     ]
   }
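For context, here is a sketch of what the whole rule looks like once this patch is applied. The hunk only shows the tail of the rule, so the backtick begin/end delimiters and their captures below are my assumption about the surrounding lines, not part of the diff:

{
  'begin': '`'
  'beginCaptures':
    '0':
      'name': 'punctuation.definition.string.begin.coffee'
  'end': '`'
  'endCaptures':
    '0':
      'name': 'punctuation.definition.string.end.coffee'
  'name': 'string.quoted.script.coffee'
  'contentName': 'source.embedded.js'
  'patterns': [
    {
      # Hand the text between the backticks to the JavaScript grammar
      # instead of tokenizing it as escape characters.
      'include': 'source.js'
    }
  ]
}

The new 'contentName' scope is what the updated spec below asserts as source.embedded.js on the inner tokens.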
diff --git a/spec/coffee-script-spec.coffee b/spec/coffee-script-spec.coffee
index 8bd17ca..f831750 100644
--- a/spec/coffee-script-spec.coffee
+++ b/spec/coffee-script-spec.coffee
@@ -267,18 +267,24 @@ describe "CoffeeScript grammar", ->
     expect(source.search /{,/).toEqual -1
 
   it "tokenizes embedded JavaScript", ->
-    {tokens} = grammar.tokenizeLine("`;`")
-    expect(tokens[0]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"]
-    expect(tokens[1]).toEqual value: ";", scopes: ["source.coffee", "string.quoted.script.coffee", "constant.character.escape.coffee"]
-    expect(tokens[2]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"]
+    waitsForPromise ->
+      atom.packages.activatePackage("language-javascript")
 
-    lines = grammar.tokenizeLines """
-      `var a = 1;`
-      a = 2
-    """
-    expect(lines[0][0]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"]
-    expect(lines[0][1]).toEqual value: 'v', scopes: ["source.coffee", "string.quoted.script.coffee", "constant.character.escape.coffee"]
-    expect(lines[1][0]).toEqual value: 'a', scopes: ["source.coffee", "variable.assignment.coffee"]
+    runs ->
+      {tokens} = grammar.tokenizeLine("`;`")
+      expect(tokens[0]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"]
+      expect(tokens[1]).toEqual value: ";", scopes: ["source.coffee", "string.quoted.script.coffee", "source.embedded.js", "punctuation.terminator.statement.js"]
+      expect(tokens[2]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"]
+
+      lines = grammar.tokenizeLines """
+        `var a = 1;`
+        a = 2
+      """
+      expect(lines[0][0]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"]
+      expect(lines[0][1]).toEqual value: 'var', scopes: ["source.coffee", "string.quoted.script.coffee", "source.embedded.js", "storage.type.var.js"]
+      expect(lines[0][6]).toEqual value: ';', scopes: ["source.coffee", "string.quoted.script.coffee", "source.embedded.js", "punctuation.terminator.statement.js"]
+      expect(lines[0][7]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"]
+      expect(lines[1][0]).toEqual value: 'a', scopes: ["source.coffee", "variable.assignment.coffee"]
 
   it "tokenizes functions", ->
     {tokens} = grammar.tokenizeLine("foo = -> 1")
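To show what this buys in practice, here is a hypothetical CoffeeScript snippet (not from the repo). Anything between backticks is passed through to the compiled JavaScript, and with this patch that region is scoped as source.embedded.js and highlighted by language-javascript instead of as escape characters:

# Embedded JavaScript: once language-javascript is active, the body of
# the backtick literal below is tokenized by the source.js grammar.
roll = `Math.floor(Math.random() * 6) + 1`

The waitsForPromise/runs wrapper in the spec is there because the source.js grammar only becomes available after the language-javascript package finishes activating.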
Thanks @50Wliu! I updated the branch.
This pull request replaces two \h that were missed in #129. /cc @infininight
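For reference, \h is Oniguruma's shorthand for a hexadecimal digit, so the replacement amounts to swapping it for an explicit character class. An illustrative before/after in CSON (these are not lines from this diff):

# Oniguruma shorthand:
'match': 'x\\h{2}'
# Equivalent explicit character class, as seen in the removed match line above:
'match': 'x[0-9A-Fa-f]{2}'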