mirror of
https://github.com/GeWuYou/GFramework.git
synced 2026-04-03 04:14:30 +08:00
feat(game): 添加游戏内容配置系统和VS Code扩展
- 引入基于YAML和JSON Schema的静态内容配置系统 - 实现运行时只读查询和Source Generator支持 - 提供VS Code扩展用于配置浏览、验证和轻量编辑 - 支持开发期热重载和跨表引用校验功能 - 包含完整的文档说明和工具链集成
This commit is contained in:
parent
3332aaff7b
commit
e8d0ea2daf
@ -143,7 +143,7 @@ var hotReload = loader.EnableHotReload(
|
|||||||
- 浏览 `config/` 目录
|
- 浏览 `config/` 目录
|
||||||
- 打开 raw YAML 文件
|
- 打开 raw YAML 文件
|
||||||
- 打开匹配的 schema 文件
|
- 打开匹配的 schema 文件
|
||||||
- 对必填字段和基础标量类型做轻量校验
|
- 对必填字段、未知顶层字段、基础标量类型和标量数组元素做轻量校验
|
||||||
- 对顶层标量字段提供轻量表单入口
|
- 对顶层标量字段提供轻量表单入口
|
||||||
|
|
||||||
当前仍建议把复杂数组、嵌套对象和批量修改放在 raw YAML 中完成。
|
当前仍建议把复杂数组、嵌套对象和批量修改放在 raw YAML 中完成。
|
||||||
|
|||||||
@ -7,9 +7,27 @@ Minimal VS Code extension scaffold for the GFramework AI-First config workflow.
|
|||||||
- Browse config files from the workspace `config/` directory
|
- Browse config files from the workspace `config/` directory
|
||||||
- Open raw YAML files
|
- Open raw YAML files
|
||||||
- Open matching schema files from `schemas/`
|
- Open matching schema files from `schemas/`
|
||||||
- Run lightweight schema validation for required fields and simple scalar types
|
- Run lightweight schema validation for required fields, unknown top-level fields, scalar types, and scalar array items
|
||||||
- Open a lightweight form preview for top-level scalar fields
|
- Open a lightweight form preview for top-level scalar fields
|
||||||
|
|
||||||
|
## Validation Coverage
|
||||||
|
|
||||||
|
The extension currently validates the repository's minimal config-schema subset:
|
||||||
|
|
||||||
|
- required top-level properties
|
||||||
|
- unknown top-level properties
|
||||||
|
- scalar compatibility for `integer`, `number`, `boolean`, and `string`
|
||||||
|
- top-level scalar arrays with scalar item type checks
|
||||||
|
|
||||||
|
Nested objects and complex arrays should still be reviewed in raw YAML.
|
||||||
|
|
||||||
|
## Local Testing
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd tools/vscode-config-extension
|
||||||
|
node --test ./test/*.test.js
|
||||||
|
```
|
||||||
|
|
||||||
## Current Constraints
|
## Current Constraints
|
||||||
|
|
||||||
- Multi-root workspaces use the first workspace folder
|
- Multi-root workspaces use the first workspace folder
|
||||||
|
|||||||
@ -20,6 +20,9 @@
|
|||||||
"onCommand:gframeworkConfig.validateAll"
|
"onCommand:gframeworkConfig.validateAll"
|
||||||
],
|
],
|
||||||
"main": "./src/extension.js",
|
"main": "./src/extension.js",
|
||||||
|
"scripts": {
|
||||||
|
"test": "node --test ./test/*.test.js"
|
||||||
|
},
|
||||||
"contributes": {
|
"contributes": {
|
||||||
"views": {
|
"views": {
|
||||||
"explorer": [
|
"explorer": [
|
||||||
|
|||||||
340
tools/vscode-config-extension/src/configValidation.js
Normal file
340
tools/vscode-config-extension/src/configValidation.js
Normal file
@ -0,0 +1,340 @@
|
|||||||
|
/**
 * Parse a minimal JSON schema document used by the config extension.
 *
 * The parser intentionally supports the same schema subset that the current
 * runtime validator and source generator depend on: a top-level `required`
 * string array plus a `properties` bag of scalar types and scalar arrays.
 *
 * @param {string} content Raw schema JSON text.
 * @returns {{required: string[], properties: Record<string, {type: string, itemType?: string}>}} Parsed schema info.
 * @throws {SyntaxError} When `content` is not valid JSON.
 */
function parseSchemaContent(content) {
  const parsed = JSON.parse(content);

  // Valid JSON can still be `null`, an array, or a primitive; treat anything
  // that is not a plain object as an empty schema instead of crashing on the
  // property accesses below.
  if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
    return {required: [], properties: {}};
  }

  const required = Array.isArray(parsed.required)
    ? parsed.required.filter((value) => typeof value === "string")
    : [];
  const properties = {};
  const propertyBag = parsed.properties || {};

  for (const [key, value] of Object.entries(propertyBag)) {
    // Skip malformed property declarations instead of failing the whole file.
    if (!value || typeof value !== "object" || typeof value.type !== "string") {
      continue;
    }

    if (value.type === "array" &&
        value.items &&
        typeof value.items === "object" &&
        typeof value.items.type === "string") {
      properties[key] = {
        type: "array",
        itemType: value.items.type
      };
      continue;
    }

    properties[key] = {
      type: value.type
    };
  }

  return {
    required,
    properties
  };
}
|
||||||
|
|
||||||
|
/**
 * Parse a minimal top-level YAML structure for config validation and form
 * preview. This parser intentionally focuses on the repository's current
 * config conventions: one root mapping object per file, top-level scalar
 * fields, and top-level scalar arrays.
 *
 * @param {string} text YAML text.
 * @returns {{entries: Map<string, {kind: string, value?: string, items?: Array<{raw: string, isComplex: boolean}>}>, keys: Set<string>}} Parsed YAML.
 */
function parseTopLevelYaml(text) {
  const entries = new Map();
  const keys = new Set();
  const lines = text.split(/\r?\n/u);
  const isBlankOrComment = (candidate) =>
    candidate.trim().length === 0 || candidate.trim().startsWith("#");

  let index = 0;
  while (index < lines.length) {
    const line = lines[index];
    index += 1;

    // Only unindented `key:` lines can start a top-level entry.
    if (!line || isBlankOrComment(line) || /^\s/u.test(line)) {
      continue;
    }

    const keyMatch = /^([A-Za-z0-9_]+):(?:\s*(.*))?$/u.exec(line);
    if (!keyMatch) {
      continue;
    }

    const key = keyMatch[1];
    const inlineValue = keyMatch[2] || "";
    keys.add(key);

    // An inline value on the same line is a scalar; block scalars (`|`, `>`)
    // are excluded and handled like value-less keys below.
    if (inlineValue.length > 0 && !inlineValue.startsWith("|") && !inlineValue.startsWith(">")) {
      entries.set(key, {
        kind: "scalar",
        value: inlineValue.trim()
      });
      continue;
    }

    // Gather the indented block that belongs to this key, skipping blank and
    // comment-only lines, stopping at the next unindented content line.
    const block = [];
    let cursor = index;
    while (cursor < lines.length) {
      const candidate = lines[cursor];
      if (isBlankOrComment(candidate)) {
        cursor += 1;
        continue;
      }
      if (!/^\s/u.test(candidate)) {
        break;
      }
      block.push(candidate);
      cursor += 1;
    }

    if (block.length === 0) {
      entries.set(key, {kind: "empty"});
      continue;
    }

    const items = parseTopLevelArray(block);
    entries.set(key, items ? {kind: "array", items} : {kind: "object"});
    index = cursor;
  }

  return {entries, keys};
}
|
||||||
|
|
||||||
|
/**
 * Produce extension-facing validation diagnostics from schema and parsed YAML.
 *
 * Checks, in order: missing required top-level properties, unknown top-level
 * properties, array shape plus scalar item compatibility, and scalar shape
 * plus scalar type compatibility.
 *
 * @param {{required: string[], properties: Record<string, {type: string, itemType?: string}>}} schemaInfo Parsed schema info.
 * @param {{entries: Map<string, {kind: string, value?: string, items?: Array<{raw: string, isComplex: boolean}>}>, keys: Set<string>}} parsedYaml Parsed YAML.
 * @returns {Array<{severity: "error" | "warning", message: string}>} Validation diagnostics.
 */
function validateParsedConfig(schemaInfo, parsedYaml) {
  const diagnostics = [];

  for (const requiredProperty of schemaInfo.required) {
    if (!parsedYaml.keys.has(requiredProperty)) {
      diagnostics.push({
        severity: "error",
        message: `Required property '${requiredProperty}' is missing.`
      });
    }
  }

  for (const key of parsedYaml.keys) {
    if (!Object.prototype.hasOwnProperty.call(schemaInfo.properties, key)) {
      diagnostics.push({
        severity: "error",
        message: `Property '${key}' is not declared in the matching schema.`
      });
    }
  }

  for (const [propertyName, propertySchema] of Object.entries(schemaInfo.properties)) {
    if (!parsedYaml.entries.has(propertyName)) {
      continue;
    }

    const entry = parsedYaml.entries.get(propertyName);
    if (propertySchema.type === "array") {
      if (entry.kind !== "array") {
        diagnostics.push({
          severity: "error",
          message: `Property '${propertyName}' is expected to be an array.`
        });
        continue;
      }

      // Fall back to a readable label: a schema may declare `type: array`
      // without a usable `items.type`, leaving `itemType` undefined, and the
      // message must not interpolate the literal string 'undefined'.
      const expectedItemType = propertySchema.itemType || "scalar";
      for (const item of entry.items || []) {
        if (item.isComplex || !isScalarCompatible(propertySchema.itemType || "", item.raw)) {
          diagnostics.push({
            severity: "error",
            message: `Array item in property '${propertyName}' is expected to be '${expectedItemType}', but the current value is incompatible.`
          });
          // One diagnostic per array keeps the output readable.
          break;
        }
      }

      continue;
    }

    if (entry.kind !== "scalar") {
      diagnostics.push({
        severity: "error",
        message: `Property '${propertyName}' is expected to be '${propertySchema.type}', but the current YAML shape is '${entry.kind}'.`
      });
      continue;
    }

    if (!isScalarCompatible(propertySchema.type, entry.value || "")) {
      diagnostics.push({
        severity: "error",
        message: `Property '${propertyName}' is expected to be '${propertySchema.type}', but the current scalar value is incompatible.`
      });
    }
  }

  return diagnostics;
}

/**
 * Determine whether a scalar value matches a minimal schema type.
 *
 * Unknown or unsupported schema types are accepted, so stricter validation
 * can be layered on later without breaking existing configs.
 *
 * @param {string} expectedType Schema type.
 * @param {string} scalarValue YAML scalar value.
 * @returns {boolean} True when compatible.
 */
function isScalarCompatible(expectedType, scalarValue) {
  const value = unquoteScalar(scalarValue);
  switch (expectedType) {
    case "integer":
      return /^-?\d+$/u.test(value);
    case "number":
      return /^-?\d+(?:\.\d+)?$/u.test(value);
    case "boolean":
      // Case-insensitive so YAML 1.1-style `True`/`FALSE` are tolerated.
      return /^(true|false)$/iu.test(value);
    case "string":
      return true;
    default:
      return true;
  }
}

/**
 * Apply scalar field updates back into the original YAML text.
 *
 * Existing top-level keys are rewritten in place; updates whose key is not
 * present in the original document are appended at the end. Indented
 * (nested) lines are never touched.
 *
 * @param {string} originalYaml Original YAML content.
 * @param {Record<string, string>} updates Updated scalar values.
 * @returns {string} Updated YAML content.
 */
function applyScalarUpdates(originalYaml, updates) {
  const lines = originalYaml.split(/\r?\n/u);
  const touched = new Set();

  const updatedLines = lines.map((line) => {
    if (/^\s/u.test(line)) {
      return line;
    }

    const match = /^([A-Za-z0-9_]+):(?:\s*(.*))?$/u.exec(line);
    if (!match) {
      return line;
    }

    const key = match[1];
    if (!Object.prototype.hasOwnProperty.call(updates, key)) {
      return line;
    }

    touched.add(key);
    return `${key}: ${formatYamlScalar(updates[key])}`;
  });

  // Any update that did not match an existing key becomes a new entry.
  for (const [key, value] of Object.entries(updates)) {
    if (touched.has(key)) {
      continue;
    }

    updatedLines.push(`${key}: ${formatYamlScalar(value)}`);
  }

  return updatedLines.join("\n");
}

/**
 * Format a scalar value for YAML output.
 *
 * Numeric and boolean literals pass through unquoted; the empty string, and
 * anything containing YAML-significant punctuation or leading/trailing
 * whitespace, is emitted as a JSON-quoted string (valid YAML double-quoting).
 *
 * @param {string} value Scalar value.
 * @returns {string} YAML-ready scalar.
 */
function formatYamlScalar(value) {
  if (/^-?\d+(?:\.\d+)?$/u.test(value) || /^(true|false)$/iu.test(value)) {
    return value;
  }

  if (value.length === 0 || /[:#\[\]\{\},]|^\s|\s$/u.test(value)) {
    return JSON.stringify(value);
  }

  return value;
}

/**
 * Remove a simple YAML string quote wrapper.
 *
 * Only a matching leading/trailing quote pair is stripped; a lone quote
 * character is returned unchanged.
 *
 * @param {string} value Scalar value.
 * @returns {string} Unquoted value.
 */
function unquoteScalar(value) {
  // Require at least two characters so a single `"` or `'` is not turned
  // into an empty string by the slice below.
  if (value.length >= 2 &&
      ((value.startsWith("\"") && value.endsWith("\"")) ||
       (value.startsWith("'") && value.endsWith("'")))) {
    return value.slice(1, -1);
  }

  return value;
}
|
||||||
|
|
||||||
|
/**
 * Parse a sequence of child lines as a top-level scalar array.
 *
 * @param {string[]} childLines Indented child lines.
 * @returns {Array<{raw: string, isComplex: boolean}> | null} Parsed array items or null when the block is not an array.
 */
function parseTopLevelArray(childLines) {
  const parsedItems = [];

  for (const childLine of childLines) {
    const stripped = childLine.trim();

    // Blank and comment-only lines never affect the array shape.
    if (stripped.length === 0 || stripped.startsWith("#")) {
      continue;
    }

    // A content line without a leading dash means this block is a nested
    // mapping (or something else), not an array.
    if (!childLine.trimStart().startsWith("-")) {
      return null;
    }

    const itemText = childLine.trimStart().slice(1).trim();
    const looksComplex =
      itemText.length === 0 ||
      itemText.startsWith("{") ||
      itemText.startsWith("[") ||
      /^[A-Za-z0-9_]+:\s*/u.test(itemText);

    parsedItems.push({
      raw: itemText,
      isComplex: looksComplex
    });
  }

  return parsedItems;
}
|
||||||
|
|
||||||
|
// Public surface consumed by the extension entry point (extension.js) and
// the node:test suite; kept alphabetical for easy scanning.
const publicApi = {
  applyScalarUpdates,
  formatYamlScalar,
  isScalarCompatible,
  parseSchemaContent,
  parseTopLevelYaml,
  unquoteScalar,
  validateParsedConfig
};

module.exports = publicApi;
|
||||||
@ -1,6 +1,13 @@
|
|||||||
const fs = require("fs");
|
const fs = require("fs");
|
||||||
const path = require("path");
|
const path = require("path");
|
||||||
const vscode = require("vscode");
|
const vscode = require("vscode");
|
||||||
|
const {
|
||||||
|
applyScalarUpdates,
|
||||||
|
parseSchemaContent,
|
||||||
|
parseTopLevelYaml,
|
||||||
|
unquoteScalar,
|
||||||
|
validateParsedConfig
|
||||||
|
} = require("./configValidation");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Activate the GFramework config extension.
|
* Activate the GFramework config extension.
|
||||||
@ -342,27 +349,13 @@ async function validateConfigFile(configUri, diagnostics) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const requiredProperty of schemaInfo.required) {
|
for (const diagnostic of validateParsedConfig(schemaInfo, parsedYaml)) {
|
||||||
if (!parsedYaml.keys.has(requiredProperty)) {
|
|
||||||
fileDiagnostics.push(new vscode.Diagnostic(
|
fileDiagnostics.push(new vscode.Diagnostic(
|
||||||
new vscode.Range(0, 0, 0, 1),
|
new vscode.Range(0, 0, 0, 1),
|
||||||
`Required property '${requiredProperty}' is missing.`,
|
diagnostic.message,
|
||||||
vscode.DiagnosticSeverity.Error));
|
diagnostic.severity === "error"
|
||||||
}
|
? vscode.DiagnosticSeverity.Error
|
||||||
}
|
: vscode.DiagnosticSeverity.Warning));
|
||||||
|
|
||||||
for (const [propertyName, expectedType] of Object.entries(schemaInfo.propertyTypes)) {
|
|
||||||
if (!parsedYaml.scalars.has(propertyName)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const scalarValue = parsedYaml.scalars.get(propertyName);
|
|
||||||
if (!isScalarCompatible(expectedType, scalarValue)) {
|
|
||||||
fileDiagnostics.push(new vscode.Diagnostic(
|
|
||||||
new vscode.Range(0, 0, 0, 1),
|
|
||||||
`Property '${propertyName}' is expected to be '${expectedType}', but the current scalar value is incompatible.`,
|
|
||||||
vscode.DiagnosticSeverity.Warning));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
diagnostics.set(configUri, fileDiagnostics);
|
diagnostics.set(configUri, fileDiagnostics);
|
||||||
@ -373,7 +366,7 @@ async function validateConfigFile(configUri, diagnostics) {
|
|||||||
*
|
*
|
||||||
* @param {vscode.Uri} configUri Config file URI.
|
* @param {vscode.Uri} configUri Config file URI.
|
||||||
* @param {vscode.WorkspaceFolder} workspaceRoot Workspace root.
|
* @param {vscode.WorkspaceFolder} workspaceRoot Workspace root.
|
||||||
* @returns {Promise<{exists: boolean, schemaPath: string, required: string[], propertyTypes: Record<string, string>}>} Schema info.
|
* @returns {Promise<{exists: boolean, schemaPath: string, required: string[], properties: Record<string, {type: string, itemType?: string}>}>} Schema info.
|
||||||
*/
|
*/
|
||||||
async function loadSchemaInfoForConfig(configUri, workspaceRoot) {
|
async function loadSchemaInfoForConfig(configUri, workspaceRoot) {
|
||||||
const schemaUri = getSchemaUriForConfigFile(configUri, workspaceRoot);
|
const schemaUri = getSchemaUriForConfigFile(configUri, workspaceRoot);
|
||||||
@ -383,144 +376,44 @@ async function loadSchemaInfoForConfig(configUri, workspaceRoot) {
|
|||||||
exists: false,
|
exists: false,
|
||||||
schemaPath,
|
schemaPath,
|
||||||
required: [],
|
required: [],
|
||||||
propertyTypes: {}
|
properties: {}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const content = await fs.promises.readFile(schemaUri.fsPath, "utf8");
|
const content = await fs.promises.readFile(schemaUri.fsPath, "utf8");
|
||||||
try {
|
try {
|
||||||
const parsed = JSON.parse(content);
|
const parsed = parseSchemaContent(content);
|
||||||
const required = Array.isArray(parsed.required)
|
|
||||||
? parsed.required.filter((value) => typeof value === "string")
|
|
||||||
: [];
|
|
||||||
const propertyTypes = {};
|
|
||||||
const properties = parsed.properties || {};
|
|
||||||
|
|
||||||
for (const [key, value] of Object.entries(properties)) {
|
|
||||||
if (!value || typeof value !== "object") {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (typeof value.type === "string") {
|
|
||||||
propertyTypes[key] = value.type;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
exists: true,
|
exists: true,
|
||||||
schemaPath,
|
schemaPath,
|
||||||
required,
|
required: parsed.required,
|
||||||
propertyTypes
|
properties: parsed.properties
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return {
|
return {
|
||||||
exists: false,
|
exists: false,
|
||||||
schemaPath,
|
schemaPath,
|
||||||
required: [],
|
required: [],
|
||||||
propertyTypes: {}
|
properties: {}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse top-level YAML keys and scalar values.
|
|
||||||
* This intentionally supports only the MVP subset needed for lightweight form
|
|
||||||
* preview and validation.
|
|
||||||
*
|
|
||||||
* @param {string} text YAML text.
|
|
||||||
* @returns {{keys: Set<string>, scalars: Map<string, string>}} Parsed shape.
|
|
||||||
*/
|
|
||||||
function parseTopLevelYaml(text) {
|
|
||||||
const keys = new Set();
|
|
||||||
const scalars = new Map();
|
|
||||||
const lines = text.split(/\r?\n/u);
|
|
||||||
|
|
||||||
for (const line of lines) {
|
|
||||||
if (!line || line.trim().length === 0 || line.trim().startsWith("#")) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (/^\s/u.test(line)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const match = /^([A-Za-z0-9_]+):(?:\s*(.*))?$/u.exec(line);
|
|
||||||
if (!match) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const key = match[1];
|
|
||||||
const rawValue = match[2] || "";
|
|
||||||
keys.add(key);
|
|
||||||
|
|
||||||
if (rawValue.length === 0) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (rawValue.startsWith("|") || rawValue.startsWith(">")) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
scalars.set(key, rawValue.trim());
|
|
||||||
}
|
|
||||||
|
|
||||||
return {keys, scalars};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Apply scalar field updates back into the original YAML text.
|
|
||||||
*
|
|
||||||
* @param {string} originalYaml Original YAML content.
|
|
||||||
* @param {Record<string, string>} updates Updated scalar values.
|
|
||||||
* @returns {string} Updated YAML content.
|
|
||||||
*/
|
|
||||||
function applyScalarUpdates(originalYaml, updates) {
|
|
||||||
const lines = originalYaml.split(/\r?\n/u);
|
|
||||||
const touched = new Set();
|
|
||||||
|
|
||||||
const updatedLines = lines.map((line) => {
|
|
||||||
if (/^\s/u.test(line)) {
|
|
||||||
return line;
|
|
||||||
}
|
|
||||||
|
|
||||||
const match = /^([A-Za-z0-9_]+):(?:\s*(.*))?$/u.exec(line);
|
|
||||||
if (!match) {
|
|
||||||
return line;
|
|
||||||
}
|
|
||||||
|
|
||||||
const key = match[1];
|
|
||||||
if (!Object.prototype.hasOwnProperty.call(updates, key)) {
|
|
||||||
return line;
|
|
||||||
}
|
|
||||||
|
|
||||||
touched.add(key);
|
|
||||||
return `${key}: ${formatYamlScalar(updates[key])}`;
|
|
||||||
});
|
|
||||||
|
|
||||||
for (const [key, value] of Object.entries(updates)) {
|
|
||||||
if (touched.has(key)) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
updatedLines.push(`${key}: ${formatYamlScalar(value)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return updatedLines.join("\n");
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Render the form-preview webview HTML.
|
* Render the form-preview webview HTML.
|
||||||
*
|
*
|
||||||
* @param {string} fileName File name.
|
* @param {string} fileName File name.
|
||||||
* @param {{exists: boolean, schemaPath: string, required: string[], propertyTypes: Record<string, string>}} schemaInfo Schema info.
|
* @param {{exists: boolean, schemaPath: string, required: string[], properties: Record<string, {type: string, itemType?: string}>}} schemaInfo Schema info.
|
||||||
* @param {{keys: Set<string>, scalars: Map<string, string>}} parsedYaml Parsed YAML data.
|
* @param {{entries: Map<string, {kind: string, value?: string, items?: Array<{raw: string, isComplex: boolean}>}>, keys: Set<string>}} parsedYaml Parsed YAML data.
|
||||||
* @returns {string} HTML string.
|
* @returns {string} HTML string.
|
||||||
*/
|
*/
|
||||||
function renderFormHtml(fileName, schemaInfo, parsedYaml) {
|
function renderFormHtml(fileName, schemaInfo, parsedYaml) {
|
||||||
const fields = Array.from(parsedYaml.scalars.entries())
|
const fields = Array.from(parsedYaml.entries.entries())
|
||||||
.map(([key, value]) => {
|
.filter(([, entry]) => entry.kind === "scalar")
|
||||||
|
.map(([key, entry]) => {
|
||||||
const escapedKey = escapeHtml(key);
|
const escapedKey = escapeHtml(key);
|
||||||
const escapedValue = escapeHtml(unquoteScalar(value));
|
const escapedValue = escapeHtml(unquoteScalar(entry.value || ""));
|
||||||
const required = schemaInfo.required.includes(key) ? "<span class=\"badge\">required</span>" : "";
|
const required = schemaInfo.required.includes(key) ? "<span class=\"badge\">required</span>" : "";
|
||||||
return `
|
return `
|
||||||
<label class="field">
|
<label class="field">
|
||||||
@ -622,62 +515,6 @@ function renderFormHtml(fileName, schemaInfo, parsedYaml) {
|
|||||||
</html>`;
|
</html>`;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Determine whether a scalar value matches a minimal schema type.
|
|
||||||
*
|
|
||||||
* @param {string} expectedType Schema type.
|
|
||||||
* @param {string} scalarValue YAML scalar value.
|
|
||||||
* @returns {boolean} True when compatible.
|
|
||||||
*/
|
|
||||||
function isScalarCompatible(expectedType, scalarValue) {
|
|
||||||
const value = unquoteScalar(scalarValue);
|
|
||||||
switch (expectedType) {
|
|
||||||
case "integer":
|
|
||||||
return /^-?\d+$/u.test(value);
|
|
||||||
case "number":
|
|
||||||
return /^-?\d+(?:\.\d+)?$/u.test(value);
|
|
||||||
case "boolean":
|
|
||||||
return /^(true|false)$/iu.test(value);
|
|
||||||
case "string":
|
|
||||||
return true;
|
|
||||||
default:
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Format a scalar value for YAML output.
|
|
||||||
*
|
|
||||||
* @param {string} value Scalar value.
|
|
||||||
* @returns {string} YAML-ready scalar.
|
|
||||||
*/
|
|
||||||
function formatYamlScalar(value) {
|
|
||||||
if (/^-?\d+(?:\.\d+)?$/u.test(value) || /^(true|false)$/iu.test(value)) {
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (value.length === 0 || /[:#\[\]\{\},]|^\s|\s$/u.test(value)) {
|
|
||||||
return JSON.stringify(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Remove a simple YAML string quote wrapper.
|
|
||||||
*
|
|
||||||
* @param {string} value Scalar value.
|
|
||||||
* @returns {string} Unquoted value.
|
|
||||||
*/
|
|
||||||
function unquoteScalar(value) {
|
|
||||||
if ((value.startsWith("\"") && value.endsWith("\"")) ||
|
|
||||||
(value.startsWith("'") && value.endsWith("'"))) {
|
|
||||||
return value.slice(1, -1);
|
|
||||||
}
|
|
||||||
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Enumerate all YAML files recursively.
|
* Enumerate all YAML files recursively.
|
||||||
*
|
*
|
||||||
|
|||||||
111
tools/vscode-config-extension/test/configValidation.test.js
Normal file
111
tools/vscode-config-extension/test/configValidation.test.js
Normal file
@ -0,0 +1,111 @@
|
|||||||
|
const test = require("node:test");
const assert = require("node:assert/strict");
const {
  applyScalarUpdates,
  parseSchemaContent,
  parseTopLevelYaml,
  validateParsedConfig
} = require("../src/configValidation");

test("parseSchemaContent should capture scalar and array property metadata", () => {
  const parsedSchema = parseSchemaContent(`
{
  "type": "object",
  "required": ["id", "name"],
  "properties": {
    "id": { "type": "integer" },
    "name": { "type": "string" },
    "dropRates": {
      "type": "array",
      "items": { "type": "integer" }
    }
  }
}
`);

  assert.deepEqual(parsedSchema.properties, {
    id: {type: "integer"},
    name: {type: "string"},
    dropRates: {type: "array", itemType: "integer"}
  });
  assert.deepEqual(parsedSchema.required, ["id", "name"]);
});

test("validateParsedConfig should report missing and unknown properties", () => {
  const parsedSchema = parseSchemaContent(`
{
  "type": "object",
  "required": ["id", "name"],
  "properties": {
    "id": { "type": "integer" },
    "name": { "type": "string" }
  }
}
`);
  const parsedDocument = parseTopLevelYaml(`
id: 1
title: Slime
`);

  const reported = validateParsedConfig(parsedSchema, parsedDocument);

  assert.equal(reported.length, 2);
  const [missingName, unknownTitle] = reported;
  assert.equal(missingName.severity, "error");
  assert.match(missingName.message, /name/u);
  assert.equal(unknownTitle.severity, "error");
  assert.match(unknownTitle.message, /title/u);
});

test("validateParsedConfig should report array item type mismatches", () => {
  const parsedSchema = parseSchemaContent(`
{
  "type": "object",
  "properties": {
    "dropRates": {
      "type": "array",
      "items": { "type": "integer" }
    }
  }
}
`);
  const parsedDocument = parseTopLevelYaml(`
dropRates:
  - 1
  - potion
`);

  const reported = validateParsedConfig(parsedSchema, parsedDocument);

  assert.equal(reported.length, 1);
  assert.equal(reported[0].severity, "error");
  assert.match(reported[0].message, /dropRates/u);
});

test("parseTopLevelYaml should classify nested mappings as object entries", () => {
  const parsedDocument = parseTopLevelYaml(`
reward:
  gold: 10
name: Slime
`);

  const {entries} = parsedDocument;
  assert.equal(entries.get("reward").kind, "object");
  assert.equal(entries.get("name").kind, "scalar");
});

test("applyScalarUpdates should update top-level scalars and append new keys", () => {
  const originalDocument = [
    "id: 1",
    "name: Slime",
    "dropRates:",
    "  - 1"
  ].join("\n");
  const rewritten = applyScalarUpdates(originalDocument, {
    name: "Goblin",
    hp: "25"
  });

  assert.match(rewritten, /^name: Goblin$/mu);
  assert.match(rewritten, /^hp: 25$/mu);
  assert.match(rewritten, /^  - 1$/mu);
});
|
||||||
Loading…
x
Reference in New Issue
Block a user