feat: Allow tokens to be escaped

This commit is contained in:
T. R. Bernstein
2025-09-30 22:54:19 +02:00
parent 25d1507159
commit c25b7a52e7
3 changed files with 37 additions and 7 deletions

View File

@@ -62,7 +62,7 @@ struct Lexer {
/// - string: The content string of the token /// - string: The content string of the token
/// - range: The range within the template content, used for smart /// - range: The range within the template content, used for smart
/// error reporting /// error reporting
func createToken(string: String, at range: Range<String.Index>) -> Token { func createToken(string: String, at range: Range<String.Index>, _ isInEscapeMode: Bool = false) -> Token {
func strip(length: (Int, Int) = (Self.tagLength, Self.tagLength)) -> String { func strip(length: (Int, Int) = (Self.tagLength, Self.tagLength)) -> String {
guard string.count > (length.0 + length.1) else { return "" } guard string.count > (length.0 + length.1) else { return "" }
let trimmed = String(string.dropFirst(length.0).dropLast(length.1)) let trimmed = String(string.dropFirst(length.0).dropLast(length.1))
@@ -73,7 +73,7 @@ struct Lexer {
return trimmed return trimmed
} }
if string.hasPrefix("{{") || string.hasPrefix("{%") || string.hasPrefix("{#") { if !isInEscapeMode && (string.hasPrefix("{{") || string.hasPrefix("{%") || string.hasPrefix("{#")) {
let behaviour = string.hasPrefix("{%") ? behaviour(string: string, tagLength: Self.tagLength) : .unspecified let behaviour = string.hasPrefix("{%") ? behaviour(string: string, tagLength: Self.tagLength) : .unspecified
let stripLengths = ( let stripLengths = (
Self.tagLength + (behaviour.leading != .unspecified ? 1 : 0), Self.tagLength + (behaviour.leading != .unspecified ? 1 : 0),
@@ -108,14 +108,14 @@ struct Lexer {
let scanner = Scanner(templateString) let scanner = Scanner(templateString)
while !scanner.isEmpty { while !scanner.isEmpty {
if let (char, text) = scanner.scanForTokenStart(Self.tokenChars) { if let (char, text, isInEscapeMode) = scanner.scanForTokenStart(Self.tokenChars) {
if !text.isEmpty { if !text.isEmpty {
tokens.append(createToken(string: text, at: scanner.range)) tokens.append(createToken(string: text, at: scanner.range))
} }
guard let end = Self.tokenCharMap[char] else { continue } guard let end = Self.tokenCharMap[char] else { continue }
let result = scanner.scanForTokenEnd(end) let result = scanner.scanForTokenEnd(end)
tokens.append(createToken(string: result, at: scanner.range)) tokens.append(createToken(string: result, at: scanner.range, isInEscapeMode))
} else { } else {
tokens.append(createToken(string: scanner.content, at: scanner.range)) tokens.append(createToken(string: scanner.content, at: scanner.range))
scanner.content = "" scanner.content = ""
@@ -148,6 +148,7 @@ class Scanner {
private static let tokenStartDelimiter: Unicode.Scalar = "{" private static let tokenStartDelimiter: Unicode.Scalar = "{"
/// And the corresponding end delimiter for a token. /// And the corresponding end delimiter for a token.
private static let tokenEndDelimiter: Unicode.Scalar = "}" private static let tokenEndDelimiter: Unicode.Scalar = "}"
private static let tokenDelimiterEscape: Unicode.Scalar = "\\"
init(_ content: String) { init(_ content: String) {
self.originalContent = content self.originalContent = content
@@ -203,18 +204,25 @@ class Scanner {
/// - Parameter tokenChars: List of token start characters to search for. /// - Parameter tokenChars: List of token start characters to search for.
/// - Returns: The found token start character, together with the content /// - Returns: The found token start character, together with the content
/// before the token, or nil of no token start was found. /// before the token, or nil of no token start was found.
func scanForTokenStart(_ tokenChars: [Unicode.Scalar]) -> (Unicode.Scalar, String)? { // swiftlint:disable:next large_tuple
func scanForTokenStart(_ tokenChars: [Unicode.Scalar]) -> (Unicode.Scalar, String, Bool)? {
var foundBrace = false var foundBrace = false
var isInEscapeMode = false
var lastChar: Unicode.Scalar = " "
range = range.upperBound..<range.upperBound range = range.upperBound..<range.upperBound
for (index, char) in zip(0..., content.unicodeScalars) { for (index, char) in zip(0..., content.unicodeScalars) {
if foundBrace && tokenChars.contains(char) { if foundBrace && tokenChars.contains(char) {
let result = String(content.unicodeScalars.prefix(index - 1)) let prefixOffset = isInEscapeMode ? 1 : 0
let prefix = String(content.unicodeScalars.prefix(index - 1 - prefixOffset))
content = String(content.unicodeScalars.dropFirst(index - 1)) content = String(content.unicodeScalars.dropFirst(index - 1))
range = range.upperBound..<originalContent.unicodeScalars.index(range.upperBound, offsetBy: index - 1) range = range.upperBound..<originalContent.unicodeScalars.index(range.upperBound, offsetBy: index - 1)
return (char, result)
return (char, prefix, isInEscapeMode)
} else { } else {
foundBrace = (char == Self.tokenStartDelimiter) foundBrace = (char == Self.tokenStartDelimiter)
isInEscapeMode = (lastChar == Self.tokenDelimiterEscape)
lastChar = char
} }
} }

View File

@@ -20,6 +20,22 @@ final class LexerTests: XCTestCase {
try expect(tokens.first) == .comment(value: "Comment", at: makeSourceMap("Comment", for: lexer)) try expect(tokens.first) == .comment(value: "Comment", at: makeSourceMap("Comment", for: lexer))
} }
// Verifies the new escape feature for variable tokens: a backslash before
// `{{` should cause the lexer to emit the would-be variable token as a single
// plain-text token (with the backslash stripped), not as a `.variable` token.
func testEscapedVariableToken() throws {
// NOTE(review): "\\{{" in Swift source is a literal backslash followed by "{{".
let lexer = Lexer(templateString: "\\{{ Variable }}")
let tokens = lexer.tokenize()
try expect(tokens.count) == 1
try expect(tokens.first) == .text(value: "{{ Variable }}", at: makeSourceMap("{{ Variable }}", for: lexer))
}
// Verifies escaping of behaviour (`{% ... %}`) tokens: a leading backslash
// should make the lexer treat the whole tag as literal text (backslash
// stripped) instead of producing a block/behaviour token.
func testEscapedBehaviourToken() throws {
let lexer = Lexer(templateString: "\\{% Variable %}")
let tokens = lexer.tokenize()
try expect(tokens.count) == 1
try expect(tokens.first) == .text(value: "{% Variable %}", at: makeSourceMap("{% Variable %}", for: lexer))
}
func testVariable() throws { func testVariable() throws {
let lexer = Lexer(templateString: "{{ Variable }}") let lexer = Lexer(templateString: "{{ Variable }}")
let tokens = lexer.tokenize() let tokens = lexer.tokenize()

View File

@@ -15,5 +15,11 @@ final class TemplateTests: XCTestCase {
let result = try template.render([ "name": "Kyle" ]) let result = try template.render([ "name": "Kyle" ])
try expect(result) == "Hello World" try expect(result) == "Hello World"
} }
// End-to-end check of the escape feature at the Template level: an escaped
// `\{{ name }}` must survive rendering verbatim as "{{ name }}" rather than
// being substituted with the context value ("Kyle").
it("can render a template with escaped token") {
let template: Template = "Hello \\{{ name }}"
let result = try template.render([ "name": "Kyle" ])
try expect(result) == "Hello {{ name }}"
}
} }
} }