Rewrites scanner for better performance. This is primarily an improvement under Ubuntu.

- Clean up readability a little bit
- Rewrite the original scan function so it's available
- Syntax improvements
- Fix deprecation warnings in Lexer
- Clean up some syntax issues in the lexer
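A note on the performance claim: the old Scanner copied a fresh String at every character position and called hasPrefix on it to look for "{{", "{%" or "{#", while the rewritten scanner walks the template's unicode scalars once with a one-character lookbehind flag. The rough micro-benchmark below is my own sketch, not part of this commit or the Stencil test suite; the template and timing harness are made up purely to contrast the two strategies.

import Foundation

// Rough, illustrative micro-benchmark only — not part of this commit.
// It contrasts the old strategy (copy a substring at every index and call
// hasPrefix) with the new one (a single pass over unicode scalars) when
// locating the first "{{" in a long template.
let template = String(repeating: "some plain text ", count: 1_000) + "{{ name }}"

func timeIt(_ label: String, _ block: () -> Int?) {
  let start = Date()
  let offset = block()
  let elapsed = Date().timeIntervalSince(start)
  print(label, offset ?? -1, String(format: "%.4fs", elapsed))
}

timeIt("hasPrefix at every index:") {
  var index = template.startIndex
  var offset = 0
  while index != template.endIndex {
    let substring = String(template[index...])   // the old scan copied here
    if substring.hasPrefix("{{") { return offset }
    index = template.index(after: index)
    offset += 1
  }
  return nil
}

timeIt("single unicode-scalar pass:") {
  var foundBrace = false
  for (index, char) in template.unicodeScalars.enumerated() {
    if foundBrace && char == "{" { return index - 1 }
    foundBrace = (char == "{")
  }
  return nil
}

Both closures report the same offset; the second avoids allocating a String per position, which is where most of the win on Linux comes from.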
@@ -7,6 +7,13 @@ struct Lexer {
   let templateString: String
   let lines: [Line]
 
+  private static let tokenChars: [Unicode.Scalar] = ["{", "%", "#"]
+  private static let tokenCharMap: [Unicode.Scalar: Unicode.Scalar] = [
+    "{": "}",
+    "%": "%",
+    "#": "#"
+  ]
+
   init(templateName: String? = nil, templateString: String) {
     self.templateName = templateName
     self.templateString = templateString
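The three scalars above are the second characters of the template delimiters ({{, {%, {#), and tokenCharMap pairs each of them with the scalar that, immediately followed by }, closes the token (}}, %}, #}). A small illustration of the lookup (sketch only, not library code):

// Illustrative sketch: the scalar that identified the opening delimiter selects
// what to look for at the end of the token.
let tokenCharMap: [Unicode.Scalar: Unicode.Scalar] = ["{": "}", "%": "%", "#": "#"]
let opening: Unicode.Scalar = "%"        // we just matched "{%"
if let closing = tokenCharMap[opening] {
  print("scan until '\(closing)' followed by '}'")   // scan until '%' followed by '}'
}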
@@ -20,9 +27,7 @@ struct Lexer {
   func createToken(string: String, at range: Range<String.Index>) -> Token {
     func strip() -> String {
       guard string.count > 4 else { return "" }
-      let start = string.index(string.startIndex, offsetBy: 2)
-      let end = string.index(string.endIndex, offsetBy: -2)
-      let trimmed = String(string[start..<end])
+      let trimmed = String(string.dropFirst(2).dropLast(2))
         .components(separatedBy: "\n")
         .filter({ !$0.isEmpty })
         .map({ $0.trim(character: " ") })
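The dropped index arithmetic and the new dropFirst(2).dropLast(2) are equivalent: both strip the two-character delimiters from the ends of the raw token string before it is split and trimmed. A quick check of that equivalence (sketch only):

// Both forms remove the leading "{%" and the trailing "%}".
let raw = "{% if user %}"

let start = raw.index(raw.startIndex, offsetBy: 2)
let end = raw.index(raw.endIndex, offsetBy: -2)
let viaIndices = String(raw[start..<end])

let viaDrop = String(raw.dropFirst(2).dropLast(2))

print(viaIndices == viaDrop)   // true — both yield " if user "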
@@ -55,21 +60,14 @@ struct Lexer {
     var tokens: [Token] = []
 
     let scanner = Scanner(templateString)
-
-    let map = [
-      "{{": "}}",
-      "{%": "%}",
-      "{#": "#}",
-    ]
-
     while !scanner.isEmpty {
-      if let text = scanner.scan(until: ["{{", "{%", "{#"]) {
-        if !text.1.isEmpty {
-          tokens.append(createToken(string: text.1, at: scanner.range))
+      if let (char, text) = scanner.scanForTokenStart(Lexer.tokenChars) {
+        if !text.isEmpty {
+          tokens.append(createToken(string: text, at: scanner.range))
         }
 
-        let end = map[text.0]!
-        let result = scanner.scan(until: end, returnUntil: true)
+        guard let end = Lexer.tokenCharMap[char] else { continue }
+        let result = scanner.scanForTokenEnd(end)
         tokens.append(createToken(string: result, at: scanner.range))
       } else {
         tokens.append(createToken(string: scanner.content, at: scanner.range))
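For reference, the loop above is what runs when a template is tokenized: scanForTokenStart returns any plain text plus the scalar that distinguishes the delimiter, and tokenCharMap picks the closing scalar for scanForTokenEnd. A hedged usage sketch follows; Lexer is internal, so it assumes test-target access, and the tokenize() name and the token kinds in the comment are my reading of the surrounding code, not shown in this diff.

@testable import Stencil

// Hedged sketch: run a small template through the rewritten lexer loop.
let lexer = Lexer(templateString: "Hello {{ name }}! {# note #}")
let tokens = lexer.tokenize()
for token in tokens {
  print(token)   // roughly: text "Hello ", variable "name", text "! ", comment "note"
}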
@@ -95,6 +93,9 @@ class Scanner {
   var content: String
   var range: Range<String.Index>
 
+  private static let tokenStartDelimiter: Unicode.Scalar = "{"
+  private static let tokenEndDelimiter: Unicode.Scalar = "}"
+
   init(_ content: String) {
     self.originalContent = content
     self.content = content
@@ -105,64 +106,43 @@ class Scanner {
     return content.isEmpty
   }
 
-  func scan(until: String, returnUntil: Bool = false) -> String {
-    var index = content.startIndex
+  func scanForTokenEnd(_ tokenChar: Unicode.Scalar) -> String {
+    var foundChar = false
 
-    if until.isEmpty {
-      return ""
-    }
-
-    range = range.upperBound..<range.upperBound
-    while index != content.endIndex {
-      let substring = String(content[index...])
-
-      if substring.hasPrefix(until) {
-        let result = String(content[..<index])
-
-        if returnUntil {
-          range = range.lowerBound..<originalContent.index(range.upperBound, offsetBy: until.count)
-          content = String(substring[until.endIndex...])
-          return result + until
-        }
-
-        content = substring
+    for (index, char) in content.unicodeScalars.enumerated() {
+      if foundChar && char == Scanner.tokenEndDelimiter {
+        let result = String(content.prefix(index))
+        content = String(content.dropFirst(index + 1))
+        range = range.upperBound..<originalContent.index(range.upperBound, offsetBy: index + 1)
         return result
+      } else {
+        foundChar = (char == tokenChar)
       }
-
-      index = content.index(after: index)
-      range = range.lowerBound..<originalContent.index(after: range.upperBound)
     }
 
     content = ""
     return ""
  }
 
-  func scan(until: [String]) -> (String, String)? {
-    if until.isEmpty {
-      return nil
-    }
-
-    var index = content.startIndex
-    range = range.upperBound..<range.upperBound
-    while index != content.endIndex {
-      let substring = String(content[index...])
-      for string in until {
-        if substring.hasPrefix(string) {
-          let result = String(content[..<index])
-          content = substring
-          return (string, result)
-        }
-      }
-
-      index = content.index(after: index)
-      range = range.lowerBound..<originalContent.index(after: range.upperBound)
-    }
-
+  func scanForTokenStart(_ tokenChars: [Unicode.Scalar]) -> (Unicode.Scalar, String)? {
+    var foundBrace = false
+
+    range = range.upperBound..<range.upperBound
+    for (index, char) in content.unicodeScalars.enumerated() {
+      if foundBrace && tokenChars.contains(char) {
+        let result = String(content.prefix(index - 1))
+        content = String(content.dropFirst(index - 1))
+        range = range.upperBound..<originalContent.index(range.upperBound, offsetBy: index - 1)
+        return (char, result)
+      } else {
+        foundBrace = (char == Scanner.tokenStartDelimiter)
+      }
+    }
     return nil
   }
 }
 
 
 extension String {
   func findFirstNot(character: Character) -> String.Index? {
     var index = startIndex
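How the two new scans hand off to each other, traced with local state instead of the Scanner class (names and the sample template are illustrative only): scanForTokenStart leaves the opening delimiter at the front of the remaining content and reports which delimiter it saw; scanForTokenEnd then consumes up to the paired closing scalar followed by }.

// Step 1 — the scanForTokenStart idea: the text before the delimiter comes back,
// the delimiter itself stays at the front of the remaining content.
let tokenChars: [Unicode.Scalar] = ["{", "%", "#"]
var content = "Hello {{ name }}! bye"

var foundBrace = false
for (index, char) in content.unicodeScalars.enumerated() {
  if foundBrace && tokenChars.contains(char) {
    print("text:", String(content.prefix(index - 1)))   // "Hello "
    print("kind:", char)                                 // {
    content = String(content.dropFirst(index - 1))       // "{{ name }}! bye"
    break
  } else {
    foundBrace = (char == "{")
  }
}

// Step 2 — the scanForTokenEnd idea: advance past the first "}" that follows
// the scalar chosen by tokenCharMap ("}" here, since the token opened with "{{").
var foundChar = false
for (index, char) in content.unicodeScalars.enumerated() {
  if foundChar && char == "}" {
    print("token:", String(content.prefix(index)))   // the scanned token text
    content = String(content.dropFirst(index + 1))   // "! bye"
    break
  } else {
    foundChar = (char == "}")
  }
}
print("rest:", content)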