Merge pull request #252 from stencilproject/fix/lexer-range

Fix lexer range calculation for tokens
David Jennes, 2018-09-26 03:18:50 +02:00 (committed by GitHub)
3 changed files with 22 additions and 5 deletions
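
For context on the symptom: in Stencil 0.13 the scanner cut the closing delimiter of a tag short, so tags written without inner spaces lost the last character of their contents and failed to resolve. A minimal reproduction against the public Stencil API; the exact broken behaviour under 0.13 is inferred from the changelog entry below, not re-verified here:

    import Stencil

    // "{{ name }}" (with spaces) tokenized correctly in 0.13, but the
    // space-free form was misparsed, so the variable failed to resolve.
    let template = Template(templateString: "Hello {{name}}!")
    let rendered = try template.render(["name": "World"])
    // Expected output (and what this fix restores): "Hello World!"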


@@ -1,5 +1,14 @@
 # Stencil Changelog
 
+## Master
+
+### Bug Fixes
+
+- Fixed a bug in Stencil 0.13 where tags without spaces were incorrectly parsed.
+  [David Jennes](https://github.com/djbe)
+  [#252](https://github.com/stencilproject/Stencil/pull/252)
+
 ## 0.13.0
 
 ### Breaking


@@ -146,7 +146,7 @@ class Scanner {
     for (index, char) in content.unicodeScalars.enumerated() {
       if foundChar && char == Scanner.tokenEndDelimiter {
-        let result = String(content.prefix(index))
+        let result = String(content.prefix(index + 1))
         content = String(content.dropFirst(index + 1))
         range = range.upperBound..<originalContent.index(range.upperBound, offsetBy: index + 1)
         return result
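
The one-character fix above is an off-by-one in how much of the scanned text is kept: `prefix(index)` excludes the character at position `index`, which here is the second character of the closing delimiter. A standalone illustration of the `String.prefix` semantics (plain Swift, independent of Stencil's types; the downstream delimiter-stripping step is assumed from the changelog, not shown in this hunk):

    // Scanning "{{Variable}}": the final "}" sits at index 11.
    let scanned = "{{Variable}}"
    let index = 11

    let old = String(scanned.prefix(index))        // "{{Variable}"  – last "}" dropped
    let fixed = String(scanned.prefix(index + 1))  // "{{Variable}}" – full token kept

If later code strips the two delimiter characters from each end, the truncated form yields "Variabl" for a space-free tag, while "{{ Variable }}" survived the bug because only whitespace was lost.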


@@ -16,7 +16,7 @@ class LexerTests: XCTestCase {
       let tokens = lexer.tokenize()
 
       try expect(tokens.count) == 1
-      try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(location: ("Hello World", 1, 0)))
+      try expect(tokens.first) == .text(value: "Hello World", at: makeSourceMap("Hello World", for: lexer))
     }
 
     $0.it("can tokenize a comment") {
@@ -24,7 +24,7 @@ class LexerTests: XCTestCase {
       let tokens = lexer.tokenize()
 
       try expect(tokens.count) == 1
-      try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(location: ("{# Comment #}", 1, 3)))
+      try expect(tokens.first) == .comment(value: "Comment", at: makeSourceMap("Comment", for: lexer))
     }
 
     $0.it("can tokenize a variable") {
@@ -32,7 +32,15 @@ class LexerTests: XCTestCase {
       let tokens = lexer.tokenize()
 
       try expect(tokens.count) == 1
-      try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(location: ("{{ Variable }}", 1, 3)))
+      try expect(tokens.first) == .variable(value: "Variable", at: makeSourceMap("Variable", for: lexer))
+    }
+
+    $0.it("can tokenize a token without spaces") {
+      let lexer = Lexer(templateString: "{{Variable}}")
+      let tokens = lexer.tokenize()
+
+      try expect(tokens.count) == 1
+      try expect(tokens.first) == .variable(value: "Variable", at: makeSourceMap("Variable", for: lexer))
     }
 
     $0.it("can tokenize unclosed tag by ignoring it") {
@@ -41,7 +49,7 @@ class LexerTests: XCTestCase {
       let tokens = lexer.tokenize()
 
       try expect(tokens.count) == 1
-      try expect(tokens.first) == .text(value: "", at: SourceMap(location: ("{{ thing", 1, 0)))
+      try expect(tokens.first) == .text(value: "", at: makeSourceMap("{{ thing", for: lexer))
     }
 
     $0.it("can tokenize a mixture of content") {