style: eslint import order

Slinetrac
2025-11-06 14:15:33 +08:00
parent c715327739
commit 13a190500b
13 changed files with 49 additions and 161 deletions

View File

@@ -17,6 +17,7 @@ export default defineConfig([
plugins: {
js: eslintJS,
// @ts-expect-error -- https://github.com/typescript-eslint/typescript-eslint/issues/11543
"react-hooks": pluginReactHooks,
// @ts-expect-error -- https://github.com/un-ts/eslint-plugin-import-x/issues/421
"import-x": pluginImportX,
@@ -132,4 +133,14 @@ export default defineConfig([
"prettier/prettier": "warn",
},
},
{
files: ["scripts/**/*.{js,mjs,cjs}", "scripts-workflow/**/*.{js,mjs,cjs}"],
languageOptions: {
globals: {
...globals.browser,
...globals.node,
},
},
},
]);
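
The rule options that drive the import re-ordering in the scripts below are not part of this hunk. As a rough sketch only, a flat-config block along the following lines would make eslint-plugin-import-x enforce grouped, alphabetized imports; the severity and option values here are assumptions, not the project's actual settings.

import { defineConfig } from "eslint/config";
import pluginImportX from "eslint-plugin-import-x";

export default defineConfig([
  {
    plugins: { "import-x": pluginImportX },
    rules: {
      // Assumed options: put Node built-ins before packages and local files,
      // then sort module paths alphabetically within each group.
      "import-x/order": [
        "warn",
        {
          groups: ["builtin", "external", "parent", "sibling", "index"],
          alphabetize: { order: "asc", caseInsensitive: true },
        },
      ],
    },
  },
]);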

View File

@@ -2,7 +2,6 @@
import fs from "fs";
import path from "path";
import process from "process";
import { fileURLToPath } from "url";
import ts from "typescript";

View File

@@ -1,7 +1,7 @@
import { exec } from "child_process";
import { promisify } from "util";
import fs from "fs/promises";
import path from "path";
import { promisify } from "util";
/**
* Rename the version number for Alpha releases

View File

@@ -66,7 +66,7 @@ const loadNamespaceJson = async () => {
};
const buildKeysFile = (keys) => {
const arrayLiteral = keys.map((key) => ` \"${key}\"`).join(",\n");
const arrayLiteral = keys.map((key) => ` "${key}"`).join(",\n");
return `// This file is auto-generated by scripts/generate-i18n-keys.mjs\n// Do not edit this file manually.\n\nexport const translationKeys = [\n${arrayLiteral}\n] as const;\n\nexport type TranslationKey = typeof translationKeys[number];\n`;
};
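
For reference, given hypothetical keys such as "common.ok" and "common.cancel", the template returned above would emit a keys file shaped roughly like this (key names invented for illustration):

// This file is auto-generated by scripts/generate-i18n-keys.mjs
// Do not edit this file manually.

export const translationKeys = [
  "common.ok",
  "common.cancel"
] as const;

export type TranslationKey = typeof translationKeys[number];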

View File

@@ -1,9 +1,10 @@
import fs from "fs";
import fsp from "fs/promises";
import path from "path";
import AdmZip from "adm-zip";
import { createRequire } from "module";
import path from "path";
import { getOctokit, context } from "@actions/github";
import AdmZip from "adm-zip";
const target = process.argv.slice(2)[0];
const alpha = process.argv.slice(2)[1];
@@ -79,11 +80,11 @@ async function resolvePortable() {
tag,
});
let assets = release.assets.filter((x) => {
const assets = release.assets.filter((x) => {
return x.name === zipFile;
});
if (assets.length > 0) {
let id = assets[0].id;
const id = assets[0].id;
await github.rest.repos.deleteReleaseAsset({
...options,
asset_id: id,

View File

@@ -1,8 +1,9 @@
import fs from "fs";
import path from "path";
import AdmZip from "adm-zip";
import { createRequire } from "module";
import fsp from "fs/promises";
import { createRequire } from "module";
import path from "path";
import AdmZip from "adm-zip";
const target = process.argv.slice(2)[0];
const ARCH_MAP = {

View File

@@ -1,14 +1,16 @@
import AdmZip from "adm-zip";
import { execSync } from "child_process";
import { createHash } from "crypto";
import fs from "fs";
import fsp from "fs/promises";
import path from "path";
import zlib from "zlib";
import AdmZip from "adm-zip";
import { glob } from "glob";
import { HttpsProxyAgent } from "https-proxy-agent";
import fetch from "node-fetch";
import path from "path";
import { extract } from "tar";
import zlib from "zlib";
import { log_debug, log_error, log_info, log_success } from "./utils.mjs";
/**
@@ -55,7 +57,7 @@ const ARCH_MAP = {
const arg1 = process.argv.slice(2)[0];
const arg2 = process.argv.slice(2)[1];
let target = arg1 === "--force" || arg1 === "-f" ? arg2 : arg1;
const target = arg1 === "--force" || arg1 === "-f" ? arg2 : arg1;
const { platform, arch } = target
? { platform: PLATFORM_MAP[target], arch: ARCH_MAP[target] }
: process;
@@ -113,7 +115,7 @@ async function calculateFileHash(filePath) {
const hashSum = createHash("sha256");
hashSum.update(fileBuffer);
return hashSum.digest("hex");
} catch (err) {
} catch (ignoreErr) {
return null;
}
}
@@ -547,9 +549,9 @@ const resolveServicePermission = async () => {
const hashCache = await loadHashCache();
let hasChanges = false;
for (let f of serviceExecutables) {
for (const f of serviceExecutables) {
const files = glob.sync(path.join(resDir, f));
for (let filePath of files) {
for (const filePath of files) {
if (fs.existsSync(filePath)) {
const currentHash = await calculateFileHash(filePath);
const cacheKey = `${filePath}_chmod`;
@@ -611,24 +613,24 @@ async function resolveLocales() {
// =======================
const SERVICE_URL = `https://github.com/clash-verge-rev/clash-verge-service-ipc/releases/download/${SIDECAR_HOST}`;
const resolveService = () => {
let ext = platform === "win32" ? ".exe" : "";
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
const ext = platform === "win32" ? ".exe" : "";
const suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
return resolveResource({
file: "clash-verge-service" + suffix + ext,
downloadURL: `${SERVICE_URL}/clash-verge-service${ext}`,
});
};
const resolveInstall = () => {
let ext = platform === "win32" ? ".exe" : "";
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
const ext = platform === "win32" ? ".exe" : "";
const suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
return resolveResource({
file: "clash-verge-service-install" + suffix + ext,
downloadURL: `${SERVICE_URL}/clash-verge-service-install${ext}`,
});
};
const resolveUninstall = () => {
let ext = platform === "win32" ? ".exe" : "";
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
const ext = platform === "win32" ? ".exe" : "";
const suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
return resolveResource({
file: "clash-verge-service-uninstall" + suffix + ext,
downloadURL: `${SERVICE_URL}/clash-verge-service-uninstall${ext}`,

View File

@@ -30,10 +30,11 @@
*/
import { execSync } from "child_process";
import { program } from "commander";
import fs from "fs/promises";
import path from "path";
import { program } from "commander";
/**
* Get the current short git commit hash
* @returns {string}

View File

@@ -1,131 +0,0 @@
#!/usr/bin/env node
/**
* One-time helper to split flat locale JSON files (e.g. en.json)
* into per-namespace files (e.g. en/shared.json, en/settings.json).
*/
import fs from "fs/promises";
import path from "path";
import process from "process";
const ROOT = process.cwd();
const LOCALES_DIR = path.join(ROOT, "src/locales");
async function ensureDir(dirPath) {
await fs.mkdir(dirPath, { recursive: true });
}
const RESERVED = new Set([
"default",
"function",
"var",
"let",
"const",
"import",
]);
function toIdentifier(namespace, taken) {
let base = namespace
.replace(/[^a-zA-Z0-9_$]/g, "_")
.replace(/^[^a-zA-Z_$]+/, "");
if (!base) {
base = "ns";
}
let candidate = base;
let counter = 1;
while (RESERVED.has(candidate) || taken.has(candidate)) {
candidate = `${base}_${counter}`;
counter += 1;
}
taken.add(candidate);
return candidate;
}
async function splitLocaleFile(filePath, lang) {
const raw = await fs.readFile(filePath, "utf-8");
let data;
try {
data = JSON.parse(raw);
} catch (err) {
throw new Error(`Failed to parse ${filePath}: ${err.message}`);
}
if (typeof data !== "object" || data === null || Array.isArray(data)) {
throw new Error(`Locale file ${filePath} must contain a JSON object`);
}
const langDir = path.join(LOCALES_DIR, lang);
await ensureDir(langDir);
const namespaces = Object.entries(data);
if (namespaces.length === 0) {
console.warn(`Locale ${lang} has no keys, skipping.`);
return;
}
const identifiers = new Map();
const taken = new Set();
for (const [namespace, value] of namespaces) {
if (typeof value !== "object" || value === null || Array.isArray(value)) {
throw new Error(
`Locale ${lang} namespace "${namespace}" must be an object`,
);
}
const targetPath = path.join(langDir, `${namespace}.json`);
const payload = `${JSON.stringify(value, null, 2)}\n`;
await fs.writeFile(targetPath, payload, "utf-8");
identifiers.set(namespace, toIdentifier(namespace, taken));
}
const importLines = namespaces
.map(([namespace]) => {
const ident = identifiers.get(namespace);
return `import ${ident} from "./${namespace}.json";`;
})
.join("\n");
const exportBody = namespaces
.map(([namespace]) => {
const ident = identifiers.get(namespace);
return ` "${namespace}": ${ident},`;
})
.join("\n");
const indexContent = `${importLines}
const resources = {
${exportBody}
};
export default resources;
`;
await fs.writeFile(path.join(langDir, "index.ts"), indexContent, "utf-8");
await fs.rm(filePath);
console.log(`Split ${lang}.json into ${namespaces.length} namespaces.`);
}
async function main() {
const entries = await fs.readdir(LOCALES_DIR, { withFileTypes: true });
const localeFiles = entries.filter(
(entry) => entry.isFile() && entry.name.endsWith(".json"),
);
if (localeFiles.length === 0) {
console.log("No flat locale JSON files found. Nothing to do.");
return;
}
for (const entry of localeFiles) {
const lang = entry.name.replace(/\.json$/, "");
const filePath = path.join(LOCALES_DIR, entry.name);
await splitLocaleFile(filePath, lang);
}
}
main().catch((err) => {
console.error(err);
process.exitCode = 1;
});

View File

@@ -1,5 +1,7 @@
import axios from "axios";
import { readFileSync } from "fs";
import axios from "axios";
import { log_error, log_info, log_success } from "./utils.mjs";
const CHAT_ID_RELEASE = "@clash_verge_re"; // official release channel

View File

@@ -58,7 +58,7 @@ export async function resolveUpdateLogDefault() {
const reEnd = /^---/;
let isCapturing = false;
let content = [];
const content = [];
let firstTag = "";
for (const line of data.split("\n")) {

View File

@@ -1,5 +1,6 @@
import fetch from "node-fetch";
import { getOctokit, context } from "@actions/github";
import fetch from "node-fetch";
import { resolveUpdateLog } from "./updatelog.mjs";
const UPDATE_TAG_NAME = "updater";
@@ -113,7 +114,7 @@ async function resolveUpdater() {
});
// delete the old assets
for (let asset of updateRelease.assets) {
for (const asset of updateRelease.assets) {
if (asset.name === UPDATE_JSON_FILE) {
await github.rest.repos.deleteReleaseAsset({
...options,

View File

@@ -1,5 +1,6 @@
import fetch from "node-fetch";
import { getOctokit, context } from "@actions/github";
import fetch from "node-fetch";
import { resolveUpdateLog, resolveUpdateLogDefault } from "./updatelog.mjs";
// Add stable update JSON filenames
@@ -259,7 +260,7 @@ async function processRelease(github, options, tag, isAlpha) {
const proxyFile = isAlpha ? ALPHA_UPDATE_JSON_PROXY : UPDATE_JSON_PROXY;
// Delete existing assets with these names
for (let asset of updateRelease.assets) {
for (const asset of updateRelease.assets) {
if (asset.name === jsonFile) {
await github.rest.repos.deleteReleaseAsset({
...options,