process template lines when Lexer is created not when parsing each token. (#230)
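Previously, every call that needed a token's position re-split the entire template: `String.rangeLine(_:)` iterated over `components(separatedBy:)` and ran `range(of:)` for each line, once per token. After this change the `Lexer` computes its `lines` array once in `init`, and `rangeLocation(_:)` resolves a range by searching that cached array. A minimal standalone sketch of the idea (simplified names, not Stencil's exact API; `compactMap` stands in for the pre-Swift-4.1 `flatMap` used in the diff):

import Foundation

// Sketch only: simplified names, not the library's real types.
typealias Line = (content: String, number: UInt, range: Range<String.Index>)

struct MiniLexer {
  let templateString: String
  let lines: [Line]

  init(templateString: String) {
    self.templateString = templateString
    // Computed once per lexer instead of once per token. As in the diff,
    // `range(of:)` maps duplicate line contents to their first occurrence.
    self.lines = templateString
      .components(separatedBy: .newlines)
      .enumerated()
      .compactMap { pair -> Line? in
        guard !pair.element.isEmpty,
              let range = templateString.range(of: pair.element) else { return nil }
        return (content: pair.element, number: UInt(pair.offset + 1), range: range)
      }
  }

  // Lookup against the cached array: no re-splitting of the template.
  func rangeLocation(_ range: Range<String.Index>) -> (content: String, lineNumber: UInt, lineOffset: Int) {
    guard let line = lines.first(where: { $0.range.contains(range.lowerBound) }) else {
      return ("", 0, 0)
    }
    let offset = templateString.distance(from: line.range.lowerBound, to: range.lowerBound)
    return (line.content, line.number, offset)
  }
}

let lexer = MiniLexer(templateString: "Hello\nWorld")
let range = lexer.templateString.range(of: "World")!
print(lexer.rangeLocation(range))  // ("World", 2, 0)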
@@ -66,11 +66,11 @@ open class SimpleErrorReporter: ErrorReporter {
 
     func describe(token: Token) -> String {
       let templateName = token.sourceMap.filename ?? ""
-      let line = token.sourceMap.line
-      let highlight = "\(String(Array(repeating: " ", count: line.offset)))^\(String(Array(repeating: "~", count: max(token.contents.characters.count - 1, 0))))"
+      let location = token.sourceMap.location
+      let highlight = "\(String(Array(repeating: " ", count: location.lineOffset)))^\(String(Array(repeating: "~", count: max(token.contents.characters.count - 1, 0))))"
 
-      return "\(templateName)\(line.number):\(line.offset): error: \(templateError.reason)\n"
-        + "\(line.content)\n"
+      return "\(templateName)\(location.lineNumber):\(location.lineOffset): error: \(templateError.reason)\n"
+        + "\(location.content)\n"
         + "\(highlight)\n"
     }
 
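The `highlight` string is the caret line printed under the offending source line: `location.lineOffset` spaces, a caret, then one tilde per remaining character of the token's contents. A quick standalone check of that construction (hypothetical values; `String(repeating:count:)` and `.count` stand in for the diff's `String(Array(repeating:count:))` and `.characters.count`):

let lineOffset = 3     // hypothetical: token starts at column 3 of its line
let contents = "name"  // hypothetical token contents
let highlight = String(repeating: " ", count: lineOffset)
  + "^"
  + String(repeating: "~", count: max(contents.count - 1, 0))
print("{{ name }}")    // the line being reported (made up)
print(highlight)       // prints "   ^~~~", lining the caret up under "name"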
@@ -1,12 +1,20 @@
 import Foundation
 
+typealias Line = (content: String, number: UInt, range: Range<String.Index>)
+
 struct Lexer {
   let templateName: String?
   let templateString: String
+  let lines: [Line]
 
   init(templateName: String? = nil, templateString: String) {
     self.templateName = templateName
     self.templateString = templateString
+
+    self.lines = templateString.components(separatedBy: .newlines).enumerated().flatMap {
+      guard !$0.element.isEmpty else { return nil }
+      return (content: $0.element, number: UInt($0.offset + 1), templateString.range(of: $0.element)!)
+    }
   }
 
   func createToken(string: String, at range: Range<String.Index>) -> Token {
@@ -25,8 +33,8 @@ struct Lexer {
     if string.hasPrefix("{{") || string.hasPrefix("{%") || string.hasPrefix("{#") {
       let value = strip()
       let range = templateString.range(of: value, range: range) ?? range
-      let line = templateString.rangeLine(range)
-      let sourceMap = SourceMap(filename: templateName, line: line)
+      let location = rangeLocation(range)
+      let sourceMap = SourceMap(filename: templateName, location: location)
 
       if string.hasPrefix("{{") {
         return .variable(value: value, at: sourceMap)
@@ -37,8 +45,8 @@ struct Lexer {
       }
     }
 
-    let line = templateString.rangeLine(range)
-    let sourceMap = SourceMap(filename: templateName, line: line)
+    let location = rangeLocation(range)
+    let sourceMap = SourceMap(filename: templateName, location: location)
     return .text(value: string, at: sourceMap)
   }
 
@@ -72,6 +80,14 @@ struct Lexer {
     return tokens
   }
 
+  func rangeLocation(_ range: Range<String.Index>) -> ContentLocation {
+    guard let line = self.lines.first(where: { $0.range.contains(range.lowerBound) }) else {
+      return ("", 0, 0)
+    }
+    let offset = templateString.distance(from: line.range.lowerBound, to: range.lowerBound)
+    return (line.content, line.number, offset)
+  }
+
 }
 
 class Scanner {
@@ -179,23 +195,6 @@ extension String {
     let last = findLastNot(character: character) ?? endIndex
     return String(self[first..<last])
   }
-
-  public func rangeLine(_ range: Range<String.Index>) -> RangeLine {
-    var lineNumber: UInt = 0
-    var offset: Int = 0
-    var lineContent = ""
-
-    for line in components(separatedBy: CharacterSet.newlines) {
-      lineNumber += 1
-      lineContent = line
-      if let rangeOfLine = self.range(of: line), rangeOfLine.contains(range.lowerBound) {
-        offset = distance(from: rangeOfLine.lowerBound, to: range.lowerBound)
-        break
-      }
-    }
-
-    return (lineContent, lineNumber, offset)
-  }
 }
 
-public typealias RangeLine = (content: String, number: UInt, offset: Int)
+public typealias ContentLocation = (content: String, lineNumber: UInt, lineOffset: Int)
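With `rangeLine` removed from `String`, location lookups now go through a `Lexer` instance, as the test changes below show. A hedged usage sketch against the new API (template text and filename made up; `SourceMap`'s initializer is internal, so this works inside the module or under an `@testable` import):

let templateString = "Hello {{ name }}"
let lexer = Lexer(templateString: templateString)
let range = templateString.range(of: "name")!
let sourceMap = SourceMap(filename: "template.html", location: lexer.rangeLocation(range))
// sourceMap.location == ("Hello {{ name }}", 1, 9)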
@@ -125,9 +125,9 @@ public class TokenParser {
         }
         // find offset of filter in the containing token so that only filter is highligted, not the whole token
         if let filterTokenRange = containingToken.contents.range(of: filterToken) {
-          var rangeLine = containingToken.sourceMap.line
-          rangeLine.offset += containingToken.contents.distance(from: containingToken.contents.startIndex, to: filterTokenRange.lowerBound)
-          syntaxError.token = .variable(value: filterToken, at: SourceMap(filename: containingToken.sourceMap.filename, line: rangeLine))
+          var location = containingToken.sourceMap.location
+          location.lineOffset += containingToken.contents.distance(from: containingToken.contents.startIndex, to: filterTokenRange.lowerBound)
+          syntaxError.token = .variable(value: filterToken, at: SourceMap(filename: containingToken.sourceMap.filename, location: location))
         } else {
           syntaxError.token = containingToken
         }
@@ -57,17 +57,17 @@ extension String {
 
 public struct SourceMap: Equatable {
   public let filename: String?
-  public let line: RangeLine
+  public let location: ContentLocation
 
-  init(filename: String? = nil, line: RangeLine = ("", 0, 0)) {
+  init(filename: String? = nil, location: ContentLocation = ("", 0, 0)) {
     self.filename = filename
-    self.line = line
+    self.location = location
   }
 
   static let unknown = SourceMap()
 
   public static func ==(lhs: SourceMap, rhs: SourceMap) -> Bool {
-    return lhs.filename == rhs.filename && lhs.line == rhs.line
+    return lhs.filename == rhs.filename && lhs.location == rhs.location
   }
 }
 
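`ContentLocation` is a plain tuple typealias, so the `==` in `SourceMap`'s `Equatable` conformance rests on Swift's built-in elementwise `==` for tuples of `Equatable` components; no extra conformance is needed:

let a: ContentLocation = ("Hello World", 1, 0)
let b: ContentLocation = ("Hello World", 1, 0)
print(a == b)  // true: compares content, lineNumber, and lineOffset elementwise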
@@ -44,8 +44,9 @@ func testEnvironment() {
     guard let range = template.templateString.range(of: token) else {
       fatalError("Can't find '\(token)' in '\(template)'")
     }
-    let rangeLine = template.templateString.rangeLine(range)
-    let sourceMap = SourceMap(filename: template.name, line: rangeLine)
+    let lexer = Lexer(templateString: template.templateString)
+    let location = lexer.rangeLocation(range)
+    let sourceMap = SourceMap(filename: template.name, location: location)
     let token = Token.block(value: token, at: sourceMap)
     return TemplateSyntaxError(reason: description, token: token, stackTrace: [])
   }
@@ -221,8 +221,9 @@ func testFilter() {
     guard let range = template.templateString.range(of: token) else {
       fatalError("Can't find '\(token)' in '\(template)'")
     }
-    let rangeLine = template.templateString.rangeLine(range)
-    let sourceMap = SourceMap(filename: template.name, line: rangeLine)
+    let lexer = Lexer(templateString: template.templateString)
+    let location = lexer.rangeLocation(range)
+    let sourceMap = SourceMap(filename: template.name, location: location)
     let token = Token.block(value: token, at: sourceMap)
     return TemplateSyntaxError(reason: description, token: token, stackTrace: [])
   }
@@ -9,7 +9,7 @@ func testLexer() {
     let tokens = lexer.tokenize()
 
     try expect(tokens.count) == 1
-    try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(line: ("Hello World", 1, 0)))
+    try expect(tokens.first) == .text(value: "Hello World", at: SourceMap(location: ("Hello World", 1, 0)))
   }
 
   $0.it("can tokenize a comment") {
@@ -17,7 +17,7 @@ func testLexer() {
     let tokens = lexer.tokenize()
 
     try expect(tokens.count) == 1
-    try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(line: ("{# Comment #}", 1, 3)))
+    try expect(tokens.first) == .comment(value: "Comment", at: SourceMap(location: ("{# Comment #}", 1, 3)))
   }
 
   $0.it("can tokenize a variable") {
@@ -25,7 +25,7 @@ func testLexer() {
     let tokens = lexer.tokenize()
 
     try expect(tokens.count) == 1
-    try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(line: ("{{ Variable }}", 1, 3)))
+    try expect(tokens.first) == .variable(value: "Variable", at: SourceMap(location: ("{{ Variable }}", 1, 3)))
   }
 
   $0.it("can tokenize unclosed tag by ignoring it") {
@@ -34,7 +34,7 @@ func testLexer() {
     let tokens = lexer.tokenize()
 
     try expect(tokens.count) == 1
-    try expect(tokens.first) == .text(value: "", at: SourceMap(line: ("{{ thing", 1, 0)))
+    try expect(tokens.first) == .text(value: "", at: SourceMap(location: ("{{ thing", 1, 0)))
   }
 
   $0.it("can tokenize a mixture of content") {
@@ -43,9 +43,9 @@ func testLexer() {
     let tokens = lexer.tokenize()
 
     try expect(tokens.count) == 3
-    try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "My name is ")!)))
-    try expect(tokens[1]) == Token.variable(value: "myname", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "myname")!)))
-    try expect(tokens[2]) == Token.text(value: ".", at: SourceMap(line: templateString.rangeLine(templateString.range(of: ".")!)))
+    try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is ")!)))
+    try expect(tokens[1]) == Token.variable(value: "myname", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "myname")!)))
+    try expect(tokens[2]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!)))
   }
 
   $0.it("can tokenize two variables without being greedy") {
@@ -54,8 +54,8 @@ func testLexer() {
     let tokens = lexer.tokenize()
 
     try expect(tokens.count) == 2
-    try expect(tokens[0]) == Token.variable(value: "thing", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "thing")!)))
-    try expect(tokens[1]) == Token.variable(value: "name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "name")!)))
+    try expect(tokens[0]) == Token.variable(value: "thing", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "thing")!)))
+    try expect(tokens[1]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name")!)))
   }
 
   $0.it("can tokenize an unclosed block") {
@@ -84,11 +84,11 @@ func testLexer() {
     let tokens = lexer.tokenize()
 
     try expect(tokens.count) == 5
-    try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "My name is")!)))
-    try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "{%")!)))
-    try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "name", options: [.backwards])!)))
-    try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(line: templateString.rangeLine(templateString.range(of: "endif")!)))
-    try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(line: templateString.rangeLine(templateString.range(of: ".")!)))
+    try expect(tokens[0]) == Token.text(value: "My name is ", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "My name is")!)))
+    try expect(tokens[1]) == Token.block(value: "if name and name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "{%")!)))
+    try expect(tokens[2]) == Token.variable(value: "name", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "name", options: [.backwards])!)))
+    try expect(tokens[3]) == Token.block(value: "endif", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: "endif")!)))
+    try expect(tokens[4]) == Token.text(value: ".", at: SourceMap(location: lexer.rangeLocation(templateString.range(of: ".")!)))
   }
 }
}