Complete lexer implementation
@@ -10,12 +10,54 @@ import Foundation
 public struct Lexer {
   public let templateString:String
 
+  // Matches variable ({{ ... }}), block ({% ... %}) and comment ({# ... #}) tags.
+  let regex = NSRegularExpression(pattern: "(\\{\\{.*\\}\\}|\\{%.*%\\}|\\{#.*#\\})", options: nil, error: nil)
 
   public init(templateString:String) {
     self.templateString = templateString
   }
 
+  func createToken(string:String) -> Token {
+    // Drops the two-character tag delimiters and trims surrounding whitespace.
+    func strip() -> String {
+      return string[string.startIndex.successor().successor()..<string.endIndex.predecessor().predecessor()].stringByTrimmingCharactersInSet(NSCharacterSet.whitespaceCharacterSet())
+    }
+
+    if string.hasPrefix("{{") {
+      return Token.Variable(value: strip())
+    } else if string.hasPrefix("{%") {
+      return Token.Block(value: strip())
+    } else if string.hasPrefix("{#") {
+      return Token.Comment(value: strip())
+    }
+
+    // Anything that is not a tag is passed through as literal text.
+    return Token.Text(value: string)
+  }
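+
+  // Illustrative mappings for createToken (editorial examples, not lines from the commit):
+  //   createToken("{{ name }}")  -> .Variable(value: "name")
+  //   createToken("{% endif %}") -> .Block(value: "endif")
+  //   createToken("{# note #}")  -> .Comment(value: "note")
+  //   createToken("plain")       -> .Text(value: "plain")
+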
   public func tokenize() -> [Token] {
-    return [Token.Text(value: templateString)]
+    // Unfortunately NSRegularExpression doesn't have a split.
+    // So here's a really terrible implementation
+
+    var tokens = [Token]()
+
+    let range = NSMakeRange(0, countElements(templateString))
+    var lastIndex = 0
+    let nsTemplateString = templateString as NSString
+    regex.enumerateMatchesInString(templateString, options: nil, range: range) { (result, flags, b) in
+      // Emit any literal text sitting between the previous match and this tag.
+      if result.range.location != lastIndex {
+        let previousMatch = nsTemplateString.substringWithRange(NSMakeRange(lastIndex, result.range.location - lastIndex))
+        tokens.append(self.createToken(previousMatch))
+      }
+
+      let match = nsTemplateString.substringWithRange(result.range)
+      tokens.append(self.createToken(match))
+
+      lastIndex = result.range.location + result.range.length
+    }
+
+    // Emit any literal text remaining after the final tag.
+    if lastIndex < countElements(templateString) {
+      let substring = (templateString as NSString).substringFromIndex(lastIndex)
+      tokens.append(Token.Text(value: substring))
+    }
+
+    return tokens
   }
 }
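
For illustration, here is a minimal usage sketch of the lexer above (editorial, not part of the commit). It targets the same Swift 1.x-era APIs the diff uses; the template string is an invented example, and the expected values in the comments follow from the tokenize() walk-through above.

    import Foundation

    let lexer = Lexer(templateString: "Hello {{ name }}{# greeting #}")
    let tokens = lexer.tokenize()

    for token in tokens {
      switch token {
      case .Text(let value):
        println("text: \(value)")      // "Hello "
      case .Variable(let value):
        println("variable: \(value)")  // "name"
      case .Comment(let value):
        println("comment: \(value)")   // "greeting"
      case .Block(let value):
        println("block: \(value)")     // not produced by this template
      }
    }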
@@ -20,4 +20,30 @@ class LexerTests: XCTestCase {
     XCTAssertEqual(tokens.first!, Token.Text(value: "Hello World"))
   }
 
+  func testTokenizeComment() {
+    let lexer = Lexer(templateString:"{# Comment #}")
+    let tokens = lexer.tokenize()
+
+    XCTAssertEqual(tokens.count, 1)
+    XCTAssertEqual(tokens.first!, Token.Comment(value: "Comment"))
+  }
+
+  func testTokenizeVariable() {
+    let lexer = Lexer(templateString:"{{ Variable }}")
+    let tokens = lexer.tokenize()
+
+    XCTAssertEqual(tokens.count, 1)
+    XCTAssertEqual(tokens.first!, Token.Variable(value: "Variable"))
+  }
+
+  func testTokenizeMixture() {
+    let lexer = Lexer(templateString:"My name is {{ name }}.")
+    let tokens = lexer.tokenize()
+
+    XCTAssertEqual(tokens.count, 3)
+    XCTAssertEqual(tokens[0], Token.Text(value: "My name is "))
+    XCTAssertEqual(tokens[1], Token.Variable(value: "name"))
+    XCTAssertEqual(tokens[2], Token.Text(value: "."))
+  }
 }
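
These assertions rely on Token being Equatable (XCTAssertEqual needs ==). The Token enum is defined elsewhere in the repository; the sketch below is an assumption about its shape, with the case names and value: labels taken from the code in this diff and the == implementation invented to match how the tests use it.

    public enum Token {
      case Text(value: String)
      case Variable(value: String)
      case Comment(value: String)
      case Block(value: String)
    }

    extension Token : Equatable {}

    // Assumed semantics: tokens are equal when both the case and the
    // associated value match.
    public func == (lhs: Token, rhs: Token) -> Bool {
      switch (lhs, rhs) {
      case let (.Text(l), .Text(r)): return l == r
      case let (.Variable(l), .Variable(r)): return l == r
      case let (.Comment(l), .Comment(r)): return l == r
      case let (.Block(l), .Block(r)): return l == r
      default: return false
      }
    }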