vanilla deepsearch

This commit is contained in:
Asaki Yuki 2026-01-18 01:59:02 +07:00
parent a144909fbf
commit ac412b798c
15 changed files with 725 additions and 78 deletions

View file

@ -0,0 +1,37 @@
import fs from "fs"
// Vanilla element definitions produced by scripts/vanilladefs.ts:
// { [namespace]: { [elementPath]: { file, type, extend? } } }.
const data: any = JSON.parse(fs.readFileSync("cache/vanilla-defs.json", "utf-8"))
/** Convert kebab-case / snake_case to camelCase, e.g. "menu-bar_x" -> "menuBarX". */
function toCamelCase(str: string) {
    return str.replace(/[-_]\w/g, match => match.charAt(1).toUpperCase())
}
// Accumulators: per-namespace type-alias lines, the Namespace union members,
// and the lines of the generated IntelliSense mapping file.
const type: string[] = []
const $$namespace: string[] = []
const intelliSense: string[] = ['import * as mc from "./elements.js"\n', "export type IntelliSense = {"]
for (const [namespace, element] of Object.entries(data)) {
    // Elements with no namespace end up under the literal key "undefined"; skip them.
    if (namespace === "undefined") continue
    $$namespace.push(`"${namespace}"`)
    // Prefix "_" so the generated identifier can never start with a digit.
    const $namespace = toCamelCase("_" + namespace)
    const eType: string[] = []
    // Only the element paths are needed here; the per-element info (file/type/extend)
    // is unused by the type generator, so don't destructure it.
    for (const ePath of Object.keys(<any>element)) {
        eType.push(`"${ePath}"`)
    }
    intelliSense.push(` "${namespace}": mc.${$namespace},`)
    type.push(`export type ${$namespace} = ${eType.join(" | ")};`)
}
intelliSense.push("}")
// Emit the Namespace union plus one string-literal union per namespace.
fs.writeFileSync(
    "src/types/vanilla/elements.ts",
    `export type Namespace = ${$$namespace.join(" | ")}\n\n` + type.join("\n"),
)
fs.writeFileSync("src/types/vanilla/intellisense.ts", intelliSense.join("\n"))

View file

@ -2,13 +2,13 @@ import JSONC from "jsonc-parser"
import fsp from "fs/promises"
import fs from "fs"
if (!fs.existsSync("prefetch")) fs.mkdirSync("prefetch")
if (!fs.existsSync("cache")) fs.mkdirSync("cache")
export const Github = {
readFile: async (user: string, repo: string, branches: string, path: string) => {
try {
return fetch(`https://raw.githubusercontent.com/${user}/${repo}/refs/heads/${branches}/${path}`).then(res =>
res.text()
res.text(),
)
} catch (error) {
console.error(error)
@ -21,7 +21,7 @@ export const PFFS = {
// Sync
readFile: (file: string) => {
try {
return fs.readFileSync(`prefetch/${file}`, "utf-8")
return fs.readFileSync(`cache/${file}`, "utf-8")
} catch (error) {
console.error(error)
process.exit(1)
@ -31,11 +31,11 @@ export const PFFS = {
writeFile: (file: string, data: string) => {
try {
file.split("/").reduce((a, b) => {
if (!fs.existsSync(a)) fs.mkdirSync(a)
return `prefetch/${a}/${b}`
if (!fs.existsSync("cache/" + a)) fs.mkdirSync("cache/" + a)
return `${a}/${b}`
})
return fsp.writeFile(`prefetch/${file}`, data)
return fsp.writeFile(`cache/${file}`, data)
} catch (error) {
console.error(error)
process.exit(1)

View file

@ -1,5 +1,32 @@
import { Github, PFFS } from "./components"
import { parse } from "jsonc-parser"

const user = "mojang"
const project = "bedrock-samples"
// Pass --preview on the command line to fetch from the preview branch instead of main.
const branches = process.argv.includes("--preview") ? "preview" : "main"

/** Fetch one file from the bedrock-samples repo and parse it as JSONC. */
async function fetchfile(path: string) {
    return parse(await Github.readFile(user, project, branches, path))
}

/**
 * Prefetch the JSON-UI schema plus every vanilla UI file listed in _ui_defs.json.
 * (The stand-alone ui.schema.json fetch that used to run at module level was a
 * leftover duplicate of the fetch below and has been removed.)
 */
async function main() {
    console.log("Prefetching...")
    const data = await Github.readFile("KalmeMarq", "Bugrock-JSON-UI-Schemas", "main", "ui.schema.json")
    await PFFS.writeFile("ui.schema.json", data)
    console.log("ui.schema.json fetched!")
    const { ui_defs } = await fetchfile("resource_pack/ui/_ui_defs.json")
    await PFFS.writeFile("ui_defs.json", JSON.stringify(ui_defs, null, 2))
    console.log("ui_defs.json fetched!")
    // Download all UI files in parallel; `fetched` counts completions for progress logs.
    let fetched = 0
    await Promise.all(
        ui_defs.map(async (path: string) => {
            const data = await fetchfile("resource_pack/" + path)
            await PFFS.writeFile(path, JSON.stringify(data, null, 2))
            console.log(`[${++fetched}/${ui_defs.length}] ${path}`)
        }),
    )
}
main()

158
scripts/vanilladefs.ts Normal file
View file

@ -0,0 +1,158 @@
import fs from "fs"
// File list written by the prefetch script; every entry exists under cache/.
const files: string[] = JSON.parse(fs.readFileSync("cache/ui_defs.json", "utf-8"))
// namespace -> (element path -> definition); filled by the read pass below.
const vanilla: NamespaceMap = new Map()
/**
 * Recursively register every named child control found in a "controls" array.
 *
 * Each entry of `data` is an object with a single "name@base" key (the "@base"
 * suffix is optional). Children whose name starts with "$" are template
 * variables, not elements, and are skipped; a base starting with "$" is an
 * unresolved variable reference and yields no extend record.
 *
 * Fix over the original: the inner accumulator no longer shadows the `data`
 * parameter (it is now `entry`), which made the loop body easy to misread.
 *
 * @param namespace UI namespace of the file being read
 * @param file      source file path, recorded on every element
 * @param elements  map the discovered elements are written into
 * @param data      raw "controls" array from the JSON UI file
 * @param prefix    slash-separated path of the parent element
 */
function readControls(namespace: string, file: string, elements: ElementMap, data: any[], prefix: string) {
    prefix += "/"
    for (const child of data) {
        // Each control object holds exactly one "name@base": properties pair.
        const [fullname, properties] = Object.entries(child)[0]
        const entry: VanillaElement = {
            file,
            type: (<any>properties).type,
        }
        const [name, base] = fullname.split("@")
        // "$foo" keys are variables, not controls.
        if (name.startsWith("$")) continue
        if (base && !base.startsWith("$")) {
            const [first, second] = base.split(".")
            if (second) {
                // Fully qualified "namespace.element" reference.
                entry.extend = {
                    name: second,
                    namespace: first,
                }
            } else {
                // Bare name: resolved within the current namespace.
                entry.extend = {
                    name: first,
                    namespace,
                }
            }
        }
        elements.set(prefix + name, entry)
        const controls = (<any>properties).controls
        if (controls) {
            readControls(namespace, file, elements, controls, prefix + name)
        }
    }
}
/**
 * Register every top-level element of one UI file into `elements`, then
 * recurse into nested controls via readControls.
 *
 * Fix over the original: the per-element accumulator no longer shadows the
 * `data` parameter (renamed to `entry`).
 *
 * NOTE(review): unlike readControls, this pass does NOT skip "$"-prefixed
 * names or "$"-prefixed bases — confirm that asymmetry is intentional.
 *
 * @param namespace UI namespace declared by the file
 * @param file      source file path, recorded on every element
 * @param elements  map the discovered elements are written into
 * @param data      the file's JSON body with "namespace" already stripped
 */
function readData(namespace: string, file: string, elements: ElementMap, data: any) {
    for (const [fullname, properties] of Object.entries(data)) {
        const [name, base] = fullname.split("@")
        const entry: VanillaElement = {
            file,
            type: (<any>properties).type,
        }
        // Animation definitions carry anim_type; they are filtered out later.
        if ((<any>properties).anim_type) {
            entry.anim_type = (<any>properties).anim_type
        }
        // Register element
        if (base) {
            const [first, second] = base.split(".")
            if (second) {
                // Fully qualified "namespace.element" reference.
                entry.extend = {
                    name: second,
                    namespace: first,
                }
            } else {
                // Bare name: resolved within the current namespace.
                entry.extend = {
                    name: first,
                    namespace,
                }
            }
        }
        elements.set(name, entry)
        const controls = (<any>properties).controls
        if (controls) {
            readControls(namespace, file, elements, controls, name)
        }
    }
}
// Read
for (const file of files) {
const { namespace, ...data } = JSON.parse(fs.readFileSync("cache/" + file, "utf-8"))
let elements = vanilla.get(namespace)
if (!elements) {
elements = new Map<Namespace, VanillaElement>()
vanilla.set(namespace, elements)
}
readData(namespace, file, elements, data)
}
// Format
/**
 * Resolve the concrete control type of an element by walking its extend chain.
 *
 * Fix over the original: drops the contradictory `?.get(name)!` non-null
 * assertion followed by `e?.` optional chains, and makes the implicit
 * undefined returns explicit. Behavior is unchanged.
 *
 * @returns the resolved type string; `null` when the chain ends in an
 *          animation (anim_type) so the caller deletes the element; or
 *          `undefined` when the element is unknown or the chain dead-ends.
 */
function getActualType(name: string, namespace: string): string | null | undefined {
    const e = vanilla.get(namespace)?.get(name)
    if (!e) return undefined
    // Animation definitions signal deletion with null.
    if (e.anim_type) return null
    if (e.type) return e.type
    if (e.extend) return getActualType(e.extend.name, e.extend.namespace)
    return undefined
}
// Pass 1: resolve each extending element's concrete type via its extend chain.
for (const [namespace, elements] of vanilla) {
for (const [name, element] of elements) {
if (element.extend) {
const type = getActualType(element.extend.name, element.extend.namespace)
if (type) {
// Chain resolved to a concrete control type.
element.type = type
// NOTE(review): `element` is already the stored reference, so this set is a no-op.
elements.set(name, element)
} else if (type === null) {
// Chain ended in an animation: drop the element.
// (Deleting from a Map while iterating it is well-defined in JS.)
vanilla.get(namespace)?.delete(name)
}
}
}
}
// Pass 2: remove the animation definitions themselves (entries with anim_type).
for (const [namespace, elements] of vanilla) {
for (const [name, element] of elements) {
if (element.anim_type) {
vanilla.get(namespace)?.delete(name)
}
}
}
// Serialize the namespace maps into a plain JSON object, defaulting any
// still-unresolved type to "unknown", and write the cache file consumed by
// the type-generation script.
const json: any = {}
for (const [namespace, elements] of vanilla) {
json[namespace] ||= {}
for (const [name, element] of elements) {
element.type ||= "unknown"
json[namespace][name] = element
}
}
fs.writeFileSync("cache/vanilla-defs.json", JSON.stringify(json, null, 4))
// Types
/** One JSON-UI element (or animation) discovered in the vanilla files. */
interface VanillaElement {
// Parent element this one extends ("name@base" suffix), if any.
extend?: {
name: string
namespace: string
}
// Present only on animation definitions; such entries are filtered out above.
anim_type?: string
// Concrete control type ("panel", "label", ...); "unknown" when unresolved.
type: string
// Source file the element was read from.
file: string
}
type Name = string
type Namespace = string
type ElementMap = Map<Name, VanillaElement>
type NamespaceMap = Map<Namespace, ElementMap>

View file

@ -11,6 +11,7 @@ for (const key in schema) {
const data = schema[key]
if (data.enum) {
const enumName = key.match(/\w+$/)?.[0].toCamelCase(true)!
console.log(enumName)
index.push(`export { ${enumName} } from "./${enumName}.js"`)
const count = new Map<string, number>()