resolve template string

This commit is contained in:
Asaki Yuki 2026-01-17 18:04:48 +07:00
parent 80df04d3f6
commit a144909fbf
6 changed files with 246 additions and 91 deletions

View file

@@ -1,12 +1,12 @@
import { makeToken, TokenKind, Token } from "./types.js"
import { makeToken, TokenKind, Token, TSToken, TSTokenKind } from "./types.js"
import * as Checker from "./Checker.js"
export function Lexer(input: string) {
export function Lexer(input: string, start: number = 0, end?: number) {
const tokens: Token[] = []
if (input.length === 0) return tokens
let index = 0
let index = start
do {
const token = input[index]
@@ -43,31 +43,88 @@ export function Lexer(input: string) {
}
case "`": {
const start = index++,
struct: boolean[] = []
const tsTokens: TSToken[] = []
const start = index
do {
const token = input[index]
let lastStruct = struct.lastItem()
if (token === "`") {
if (struct.length) {
if (lastStruct === false) struct.pop()
else struct.push(false)
} else break
}
if (token === "$") {
if (input[index + 1] === "{" && !lastStruct) {
struct.push(true)
const tokenization = (start: number) => {
while (index < input.length) {
const char = input[index]
if (char === "`") {
index++
eatString()
} else if (char === "}") {
tsTokens.push({
kind: TSTokenKind.EXPRESSION,
tokens: Lexer(input, start + 1, index),
})
break
}
index++
}
}
if (token === "}" && lastStruct === true) struct.pop()
} while (++index < input.length)
const stringification = (start: number) => {
while (index < input.length) {
const char = input[index]
if (char === "`") {
if (start + 1 !== index)
tsTokens.push({
kind: TSTokenKind.STRING,
tokens: {
kind: TokenKind.STRING,
start: start + 1,
length: index - start + 1,
value: `'${input.slice(start + 1, index)}'`,
},
})
tokens.push(makeToken(input, TokenKind.TEMPLATE_STRING, start, index - start + 1))
break
} else if (char === "$" && input[index + 1] === "{") {
tsTokens.push({
kind: TSTokenKind.STRING,
tokens: {
value: `'${input.slice(start + 1, index)}'`,
kind: TokenKind.STRING,
length: index - start + 1,
start,
},
})
tokenization(++index)
start = index
}
index++
}
}
const eatString = () => {
while (index < input.length) {
const char = input[index]
if (char === "`") {
break
} else if (char === "$" && input[index + 1] === "{") {
index++
eatTemplate()
}
index++
}
}
const eatTemplate = () => {
while (index < input.length) {
const char = input[index]
if (char === "`") {
eatString()
} else if (char === "}") {
break
}
index++
}
}
stringification(index++)
tokens.push(makeToken(tsTokens, TokenKind.TEMPLATE_STRING, start, index - start + 1))
break
}
@@ -117,10 +174,15 @@ export function Lexer(input: string) {
} else if (Checker.isWordChar(token)) {
while (Checker.isWordChar(input[index + 1])) index++
tokens.push(makeToken(input, TokenKind.WORD, start, index - start + 1))
} else if (!Checker.isBlankChar(token)) {
console.error(
`\x1b[31m${input.slice(0, index)}>>>${token}<<<${input.slice(index + 1)}\nInvalid character.\x1b[0m`,
)
throw new Error()
}
}
}
} while (++index < input.length)
} while (++index < (end || input.length))
return tokens
}