Chore: refactor output dir

SukkaW 2024-09-06 19:49:17 +08:00
parent 5bc2e017df
commit 315b38b999
19 changed files with 117 additions and 172 deletions

.gitignore vendored
View File

@@ -5,17 +5,3 @@ node_modules
.cache
public
tmp*
# $ build output
List/
Clash/
Internal/
sing-box/
Modules/sukka_local_dns_mapping.sgmodule
Modules/sukka_url_redirect.sgmodule
Modules/sukka_common_always_realip.sgmodule
Mock/www-google-analytics-com_ga.js
Mock/www-googletagservices-com_gpt.js
Mock/www-google-analytics-com_analytics.js
Mock/www-googlesyndication-com_adsbygoogle.js
Mock/amazon-adsystem-com_amazon-apstag.js

View File

@@ -9,6 +9,7 @@ import { domainDeduper } from './lib/domain-deduper';
import { appendArrayInPlace } from './lib/append-array-in-place';
import { sortDomains } from './lib/stable-sort-domain';
import { output } from './lib/misc';
import { SOURCE_DIR } from './constants/dir';
const getS3OSSDomainsPromise = (async (): Promise<string[]> => {
const trie = createTrie(
@@ -58,9 +59,9 @@ export const buildCdnDownloadConf = task(require.main === module, __filename)(as
steamDomainSet
] = await Promise.all([
getS3OSSDomainsPromise,
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/cdn.conf')),
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/download.conf')),
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/steam.conf'))
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'domainset/cdn.conf')),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'domainset/download.conf')),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'domainset/steam.conf'))
]);
appendArrayInPlace(downloadDomainSet, S3OSSDomains.map(domain => `.${domain}`));

View File

@@ -11,17 +11,13 @@ import { SHARED_DESCRIPTION } from './lib/constants';
import { fdir as Fdir } from 'fdir';
import { appendArrayInPlace } from './lib/append-array-in-place';
import { removeFiles } from './lib/misc';
import { OUTPUT_CLASH_DIR, OUTPUT_SINGBOX_DIR, OUTPUT_SURGE_DIR, SOURCE_DIR } from './constants/dir';
const MAGIC_COMMAND_SKIP = '# $ custom_build_script';
const MAGIC_COMMAND_RM = '# $ custom_no_output';
const MAGIC_COMMAND_TITLE = '# $ meta_title ';
const MAGIC_COMMAND_DESCRIPTION = '# $ meta_description ';
const sourceDir = path.resolve(__dirname, '../Source');
const outputSurgeDir = path.resolve(__dirname, '../List');
const outputClashDir = path.resolve(__dirname, '../Clash');
const outputSingboxDir = path.resolve(__dirname, '../sing-box');
const domainsetSrcFolder = 'domainset' + path.sep;
export const buildCommon = task(require.main === module, __filename)(async (span) => {
@@ -46,12 +42,12 @@ export const buildCommon = task(require.main === module, __filename)(async (span
return true;
})
.crawl(sourceDir)
.crawl(SOURCE_DIR)
.withPromise();
for (let i = 0, len = paths.length; i < len; i++) {
const relativePath = paths[i];
const fullPath = sourceDir + path.sep + relativePath;
const fullPath = SOURCE_DIR + path.sep + relativePath;
if (relativePath.startsWith(domainsetSrcFolder)) {
promises.push(transformDomainset(span, fullPath, relativePath));
@@ -127,9 +123,9 @@ function transformDomainset(parentSpan: Span, sourcePath: string, relativePath:
if (res === $rm) {
return removeFiles([
path.resolve(outputSurgeDir, relativePath),
path.resolve(outputClashDir, `${clashFileBasename}.txt`),
path.resolve(outputSingboxDir, `${clashFileBasename}.json`)
path.resolve(OUTPUT_SURGE_DIR, relativePath),
path.resolve(OUTPUT_CLASH_DIR, `${clashFileBasename}.txt`),
path.resolve(OUTPUT_SINGBOX_DIR, `${clashFileBasename}.json`)
]);
}
@@ -153,9 +149,9 @@ function transformDomainset(parentSpan: Span, sourcePath: string, relativePath:
deduped,
'domainset',
[
path.resolve(outputSurgeDir, relativePath),
path.resolve(outputClashDir, `${clashFileBasename}.txt`),
path.resolve(outputSingboxDir, `${clashFileBasename}.json`)
path.resolve(OUTPUT_SURGE_DIR, relativePath),
path.resolve(OUTPUT_CLASH_DIR, `${clashFileBasename}.txt`),
path.resolve(OUTPUT_SINGBOX_DIR, `${clashFileBasename}.json`)
]
);
}
@@ -176,9 +172,9 @@ async function transformRuleset(parentSpan: Span, sourcePath: string, relativePa
if (res === $rm) {
return removeFiles([
path.resolve(outputSurgeDir, relativePath),
path.resolve(outputClashDir, `${clashFileBasename}.txt`),
path.resolve(outputSingboxDir, `${clashFileBasename}.json`)
path.resolve(OUTPUT_SURGE_DIR, relativePath),
path.resolve(OUTPUT_CLASH_DIR, `${clashFileBasename}.txt`),
path.resolve(OUTPUT_SINGBOX_DIR, `${clashFileBasename}.json`)
]);
}
@@ -201,9 +197,9 @@ async function transformRuleset(parentSpan: Span, sourcePath: string, relativePa
lines,
'ruleset',
[
path.resolve(outputSurgeDir, relativePath),
path.resolve(outputClashDir, `${clashFileBasename}.txt`),
path.resolve(outputSingboxDir, `${clashFileBasename}.json`)
path.resolve(OUTPUT_SURGE_DIR, relativePath),
path.resolve(OUTPUT_CLASH_DIR, `${clashFileBasename}.txt`),
path.resolve(OUTPUT_SINGBOX_DIR, `${clashFileBasename}.json`)
]
);
});

View File

@@ -1,3 +1,4 @@
import { OUTPUT_CLASH_DIR, OUTPUT_SURGE_DIR } from './constants/dir';
import { compareAndWriteFile } from './lib/create-file';
import { task } from './trace';
import path from 'node:path';
@@ -8,15 +9,12 @@ const DEPRECATED_FILES = [
['domainset/reject_phishing', 'This file has been merged with domainset/reject']
];
const outputSurgeDir = path.resolve(__dirname, '../List');
const outputClashDir = path.resolve(__dirname, '../Clash');
export const buildDeprecateFiles = task(require.main === module, __filename)((span) => span.traceChildAsync('create deprecated files', async (childSpan) => {
const promises: Array<Promise<unknown>> = [];
for (const [filePath, description] of DEPRECATED_FILES) {
const surgeFile = path.resolve(outputSurgeDir, `${filePath}.conf`);
const clashFile = path.resolve(outputClashDir, `${filePath}.txt`);
const surgeFile = path.resolve(OUTPUT_SURGE_DIR, `${filePath}.conf`);
const clashFile = path.resolve(OUTPUT_CLASH_DIR, `${filePath}.txt`);
const content = [
'#########################################',

View File

@@ -10,10 +10,11 @@ import { createMemoizedPromise } from './lib/memo-promise';
import * as yaml from 'yaml';
import { appendArrayInPlace } from './lib/append-array-in-place';
import { output, writeFile } from './lib/misc';
import { OUTPUT_INTERNAL_DIR, OUTPUT_MODULES_DIR, SOURCE_DIR } from './constants/dir';
export const getDomesticAndDirectDomainsRulesetPromise = createMemoizedPromise(async () => {
const domestics = await readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/domestic.conf'));
const directs = await readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/direct.conf'));
const domestics = await readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/domestic.conf'));
const directs = await readFileIntoProcessedArray(path.resolve(SOURCE_DIR, 'non_ip/direct.conf'));
const lans: string[] = [];
Object.entries(DOMESTICS).forEach(([, { domains }]) => {
@@ -91,10 +92,10 @@ export const buildDomesticRuleset = task(require.main === module, __filename)(as
])
])
],
path.resolve(__dirname, '../Modules/sukka_local_dns_mapping.sgmodule')
path.resolve(OUTPUT_MODULES_DIR, 'sukka_local_dns_mapping.sgmodule')
),
writeFile(
path.resolve(__dirname, '../Internal/clash_nameserver_policy.yaml'),
path.join(OUTPUT_INTERNAL_DIR, 'clash_nameserver_policy.yaml'),
yaml.stringify(
{
dns: {

View File

@@ -6,6 +6,7 @@ import { getChnCidrPromise } from './build-chn-cidr';
import { NON_CN_CIDR_INCLUDED_IN_CHNROUTE, RESERVED_IPV4_CIDR } from './constants/cidr';
import { writeFile } from './lib/misc';
import { OUTPUT_INTERNAL_DIR } from './constants/dir';
export const buildInternalReverseChnCIDR = task(require.main === module, __filename)(async () => {
const [cidr] = await getChnCidrPromise();
@@ -21,8 +22,7 @@ export const buildInternalReverseChnCIDR = task(require.main === module, __filen
)
);
const outputDir = path.resolve(__dirname, '../Internal');
const outputFile = path.join(outputDir, 'reversed-chn-cidr.txt');
const outputFile = path.join(OUTPUT_INTERNAL_DIR, 'reversed-chn-cidr.txt');
return writeFile(
outputFile,

View File

@@ -1,69 +1,52 @@
import path from 'node:path';
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import { task } from './trace';
import { treeDir } from './lib/tree-dir';
import type { TreeType, TreeTypeArray } from './lib/tree-dir';
import { fdir as Fdir } from 'fdir';
import Trie from 'mnemonist/trie';
import { OUTPUT_MOCK_DIR, OUTPUT_MODULES_DIR, PUBLIC_DIR, ROOT_DIR } from './constants/dir';
import { writeFile } from './lib/misc';
import picocolors from 'picocolors';
const rootPath = path.resolve(__dirname, '../');
const publicPath = path.resolve(__dirname, '../public');
const mockDir = path.join(ROOT_DIR, 'Mock');
const modulesDir = path.join(ROOT_DIR, 'Modules');
const folderAndFilesToBeDeployed = [
`Mock${path.sep}`,
`List${path.sep}`,
`Clash${path.sep}`,
`sing-box${path.sep}`,
`Modules${path.sep}`,
`Script${path.sep}`,
`Internal${path.sep}`,
'LICENSE'
];
const copyDirContents = async (srcDir: string, destDir: string) => {
const promises: Array<Promise<void>> = [];
for await (const entry of await fsp.opendir(srcDir)) {
const src = path.join(srcDir, entry.name);
const dest = path.join(destDir, entry.name);
if (entry.isDirectory()) {
console.warn(picocolors.red('[build public] can\'t copy directory'), src);
} else {
promises.push(fsp.copyFile(src, dest, fs.constants.COPYFILE_FICLONE));
}
}
return Promise.all(promises);
};
export const buildPublic = task(require.main === module, __filename)(async (span) => {
fs.mkdirSync(publicPath, { recursive: true });
await span.traceChildAsync('copy rest of the files', async () => {
await Promise.all([
fsp.mkdir(OUTPUT_MODULES_DIR, { recursive: true }),
fsp.mkdir(OUTPUT_MOCK_DIR, { recursive: true })
]);
await span
.traceChild('copy public files')
.traceAsyncFn(async () => {
const trie = Trie.from(await new Fdir()
.withRelativePaths()
.exclude((dirName) => (
dirName === 'node_modules'
|| dirName === 'Build'
|| dirName === 'public'
|| dirName[0] === '.'
))
.crawl(rootPath)
.withPromise());
const filesToBeCopied = folderAndFilesToBeDeployed.flatMap(folderOrFile => trie.find(folderOrFile));
return Promise.all(filesToBeCopied.map(file => {
const src = path.join(rootPath, file);
const dest = path.join(publicPath, file);
const destParen = path.dirname(dest);
if (!fs.existsSync(destParen)) {
fs.mkdirSync(destParen, { recursive: true });
}
return fsp.copyFile(
src,
dest,
fs.constants.COPYFILE_FICLONE
);
}));
});
await Promise.all([
copyDirContents(modulesDir, OUTPUT_MODULES_DIR),
copyDirContents(mockDir, OUTPUT_MOCK_DIR)
]);
});
const html = await span
.traceChild('generate index.html')
.traceAsyncFn(() => treeDir(publicPath).then(generateHtml));
.traceAsyncFn(() => treeDir(PUBLIC_DIR).then(generateHtml));
return writeFile(path.join(publicPath, 'index.html'), html);
return writeFile(path.join(PUBLIC_DIR, 'index.html'), html);
});
const priorityOrder: Record<'default' | string & {}, number> = {

View File

@@ -21,8 +21,9 @@ import { getPhishingDomains } from './lib/get-phishing-domains';
import { setAddFromArray, setAddFromArrayCurried } from './lib/set-add-from-array';
import { output } from './lib/misc';
import { appendArrayInPlace } from './lib/append-array-in-place';
import { OUTPUT_INTERNAL_DIR, SOURCE_DIR } from './constants/dir';
const getRejectSukkaConfPromise = readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'));
const getRejectSukkaConfPromise = readFileIntoProcessedArray(path.join(SOURCE_DIR, 'domainset/reject_sukka.conf'));
export const buildRejectDomainSet = task(require.main === module, __filename)(async (span) => {
/** Whitelists */
@@ -214,7 +215,7 @@ export const buildRejectDomainSet = task(require.main === module, __filename)(as
compareAndWriteFile(
span,
rejectDomainsStats,
path.resolve(__dirname, '../Internal/reject-stats.txt')
path.join(OUTPUT_INTERNAL_DIR, 'reject-stats.txt')
)
]);
});

View File

@@ -4,6 +4,7 @@ import { compareAndWriteFile } from './lib/create-file';
import { DIRECTS, LANS } from '../Source/non_ip/direct';
import * as yaml from 'yaml';
import { writeFile } from './lib/misc';
import { OUTPUT_INTERNAL_DIR, OUTPUT_MODULES_DIR } from './constants/dir';
const HOSTNAMES = [
// Network Detection, Captive Portal
@@ -59,10 +60,10 @@ export const buildAlwaysRealIPModule = task(require.main === module, __filename)
'[General]',
`always-real-ip = %APPEND% ${HOSTNAMES.concat(surge).join(', ')}`
],
path.resolve(__dirname, '../Modules/sukka_common_always_realip.sgmodule')
path.resolve(OUTPUT_MODULES_DIR, 'sukka_common_always_realip.sgmodule')
),
writeFile(
path.resolve(__dirname, '../Internal/clash_fake_ip_filter.yaml'),
path.join(OUTPUT_INTERNAL_DIR, 'clash_fake_ip_filter.yaml'),
yaml.stringify(
{
dns: {

View File

@@ -3,6 +3,7 @@ import { task } from './trace';
import { compareAndWriteFile } from './lib/create-file';
import { getHostname } from 'tldts';
import { isTruthy } from './lib/misc';
import { OUTPUT_MODULES_DIR } from './constants/dir';
function escapeRegExp(string = '') {
const reRegExpChar = /[$()*+.?[\\\]^{|}]/g;
@@ -148,6 +149,6 @@ export const buildRedirectModule = task(require.main === module, __filename)(asy
...REDIRECT_MIRROR.map(([from, to]) => `^https?://${escapeRegExp(from)}(.*) ${to}$1 header`),
...REDIRECT_FAKEWEBSITES.map(([from, to]) => `^https?://(www.)?${escapeRegExp(from)} ${to} 307`)
],
path.resolve(__dirname, '../Modules/sukka_url_redirect.sgmodule')
path.join(OUTPUT_MODULES_DIR, 'sukka_url_redirect.sgmodule')
);
});

View File

@@ -12,6 +12,7 @@ import { compareAndWriteFile } from './lib/create-file';
import { getMicrosoftCdnRulesetPromise } from './build-microsoft-cdn';
import { isTruthy } from './lib/misc';
import { appendArrayInPlace } from './lib/append-array-in-place';
import { OUTPUT_INTERNAL_DIR, SOURCE_DIR } from './constants/dir';
const POLICY_GROUPS: Array<[name: string, insertProxy: boolean, insertDirect: boolean]> = [
['Default Proxy', true, false],
@@ -55,18 +56,18 @@ export const buildSSPanelUIMAppProfile = task(require.main === module, __filenam
),
getAppleCdnDomainsPromise().then(domains => domains.map(domain => `DOMAIN-SUFFIX,${domain}`)),
getMicrosoftCdnRulesetPromise().then(surgeRulesetToClashClassicalTextRuleset),
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/apple_cn.conf')),
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/neteasemusic.conf')).then(surgeRulesetToClashClassicalTextRuleset),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/apple_cn.conf')),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/neteasemusic.conf')).then(surgeRulesetToClashClassicalTextRuleset),
// microsoft & apple - domains
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/microsoft.conf')),
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/apple_services.conf')).then(surgeRulesetToClashClassicalTextRuleset),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/microsoft.conf')),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/apple_services.conf')).then(surgeRulesetToClashClassicalTextRuleset),
// stream - domains
surgeRulesetToClashClassicalTextRuleset(AllStreamServices.flatMap((i) => i.rules)),
// steam - domains
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/steam.conf')).then(surgeDomainsetToClashRuleset),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'domainset/steam.conf')).then(surgeDomainsetToClashRuleset),
// global - domains
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/global.conf')).then(surgeRulesetToClashClassicalTextRuleset),
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/non_ip/telegram.conf')).then(surgeRulesetToClashClassicalTextRuleset),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/global.conf')).then(surgeRulesetToClashClassicalTextRuleset),
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'non_ip/telegram.conf')).then(surgeRulesetToClashClassicalTextRuleset),
// domestic - ip cidr
getChnCidrPromise().then(([cidrs4, cidrs6]) => [
...cidrs4.map(cidr => `IP-CIDR,${cidr}`),
@@ -83,7 +84,7 @@ export const buildSSPanelUIMAppProfile = task(require.main === module, __filenam
// global - ip cidr
getTelegramCIDRPromise(),
// lan - ip cidr
readFileIntoProcessedArray(path.resolve(__dirname, '../Source/ip/lan.conf'))
readFileIntoProcessedArray(path.join(SOURCE_DIR, 'ip/lan.conf'))
] as const);
const telegramCidrs = rawTelegramCidrs.map(removeNoResolved);
@@ -121,7 +122,7 @@ export const buildSSPanelUIMAppProfile = task(require.main === module, __filenam
await compareAndWriteFile(
span,
output,
path.resolve(__dirname, '../Internal/appprofile.php')
path.resolve(OUTPUT_INTERNAL_DIR, 'appprofile.php')
);
});

Build/constants/dir.ts Normal file
View File

@@ -0,0 +1,13 @@
import path from 'node:path';
export const ROOT_DIR = path.resolve(__dirname, '../..');
export const SOURCE_DIR = path.join(ROOT_DIR, 'Source');
export const PUBLIC_DIR = path.resolve(ROOT_DIR, 'public');
export const OUTPUT_SURGE_DIR = path.join(PUBLIC_DIR, 'List');
export const OUTPUT_CLASH_DIR = path.resolve(PUBLIC_DIR, 'Clash');
export const OUTPUT_SINGBOX_DIR = path.resolve(PUBLIC_DIR, 'sing-box');
export const OUTPUT_MODULES_DIR = path.resolve(PUBLIC_DIR, 'Modules');
export const OUTPUT_INTERNAL_DIR = path.resolve(PUBLIC_DIR, 'Internal');
export const OUTPUT_MOCK_DIR = path.resolve(PUBLIC_DIR, 'Mock');
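
Since every path in the diffs above and below is now derived from this new Build/constants/dir.ts module, here is a minimal usage sketch (not part of the commit; the consuming file and the global.conf paths are illustrative only):

import path from 'node:path';
import { SOURCE_DIR, OUTPUT_SURGE_DIR } from './constants/dir';

// Resolve a ruleset in the Source tree and its Surge output counterpart under public/List.
const sourceConf = path.join(SOURCE_DIR, 'non_ip/global.conf');
const surgeOut = path.join(OUTPUT_SURGE_DIR, 'non_ip/global.conf');
console.log(sourceConf, surgeOut);

Because the OUTPUT_* constants resolve under PUBLIC_DIR, build outputs now land directly in the deploy directory (public/) instead of in the top-level List/, Clash/, sing-box/ and related folders, which is why those entries are removed from .gitignore above.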

View File

@@ -1,9 +1,11 @@
import { task } from './trace';
import path from 'node:path';
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { fetchWithRetry } from './lib/fetch-retry';
import { OUTPUT_MOCK_DIR } from './constants/dir';
const ASSETS_LIST = {
'www-google-analytics-com_ga.js': 'https://raw.githubusercontent.com/AdguardTeam/Scriptlets/master/dist/redirect-files/google-analytics-ga.js',
@@ -13,19 +15,20 @@
'amazon-adsystem-com_amazon-apstag.js': 'https://raw.githubusercontent.com/AdguardTeam/Scriptlets/master/dist/redirect-files/amazon-apstag.js'
} as const;
const mockDir = path.resolve(__dirname, '../Mock');
export const downloadMockAssets = task(require.main === module, __filename)((span) => Promise.all(Object.entries(ASSETS_LIST).map(
([filename, url]) => span
.traceChildAsync(url, () => fetchWithRetry(url).then(res => {
const src = path.join(mockDir, filename);
.traceChildAsync(url, async () => {
const res = await fetchWithRetry(url);
const src = path.join(OUTPUT_MOCK_DIR, filename);
if (!res.body) {
throw new Error(`Empty body from ${url}`);
}
await fsp.mkdir(OUTPUT_MOCK_DIR, { recursive: true });
return pipeline(
Readable.fromWeb(res.body),
fs.createWriteStream(src, 'utf-8')
);
}))
})
)));

View File

@@ -1,57 +1,17 @@
import { existsSync, createWriteStream } from 'node:fs';
import { createWriteStream } from 'node:fs';
import { mkdir } from 'node:fs/promises';
import path from 'node:path';
import { pipeline } from 'node:stream/promises';
import { readFileByLine } from './lib/fetch-text-by-line';
import { isCI } from 'ci-info';
import { task } from './trace';
import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
import tarStream from 'tar-stream';
import zlib from 'node:zlib';
import { Readable } from 'node:stream';
const IS_READING_BUILD_OUTPUT = 1 << 2;
const ALL_FILES_EXISTS = 1 << 3;
const GITHUB_CODELOAD_URL = 'https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master';
const GITLAB_CODELOAD_URL = 'https://gitlab.com/SukkaW/ruleset.skk.moe/-/archive/master/ruleset.skk.moe-master.tar.gz';
export const downloadPreviousBuild = task(require.main === module, __filename)(async (span) => {
const buildOutputList: string[] = [];
let flag = 1 | ALL_FILES_EXISTS;
await span
.traceChild('read .gitignore')
.traceAsyncFn(async () => {
for await (const line of readFileByLine(path.resolve(__dirname, '../.gitignore'))) {
if (line === '# $ build output') {
flag = flag | IS_READING_BUILD_OUTPUT;
continue;
}
if (!(flag & IS_READING_BUILD_OUTPUT)) {
continue;
}
buildOutputList.push(line);
if (!isCI && !existsSync(path.join(__dirname, '..', line))) {
flag = flag & ~ALL_FILES_EXISTS;
}
}
});
if (isCI) {
flag = flag & ~ALL_FILES_EXISTS;
}
if (flag & ALL_FILES_EXISTS) {
console.log('All files exists, skip download.');
return;
}
const filesList = buildOutputList.map(f => path.join('ruleset.skk.moe-master', f));
const tarGzUrl = await span.traceChildAsync('get tar.gz url', async () => {
const resp = await fetchWithRetry(GITHUB_CODELOAD_URL, {
...defaultRequestInit,
@@ -68,6 +28,8 @@ export const downloadPreviousBuild = task(require.main === module, __filename)(a
return GITHUB_CODELOAD_URL;
});
const publicDir = path.resolve(__dirname, '..', 'public');
return span.traceChildAsync('download & extract previous build', async () => {
const resp = await fetchWithRetry(tarGzUrl, {
headers: {
@@ -112,14 +74,9 @@ export const downloadPreviousBuild = task(require.main === module, __filename)(a
entry.resume(); // Drain the entry
continue;
}
// filter entry
if (!filesList.some(f => entry.header.name.startsWith(f))) {
entry.resume(); // Drain the entry
continue;
}
const relativeEntryPath = entry.header.name.replace(pathPrefix, '');
const targetPath = path.join(__dirname, '..', relativeEntryPath);
const targetPath = path.join(publicDir, relativeEntryPath);
await mkdir(path.dirname(targetPath), { recursive: true });
await pipeline(entry, createWriteStream(targetPath));

View File

@@ -3,8 +3,9 @@ import { processLine, processLineFromReadline } from './process-line';
import { readFileByLine } from './fetch-text-by-line';
import path from 'node:path';
import fsp from 'node:fs/promises';
import { SOURCE_DIR } from '../constants/dir';
const file = path.resolve(__dirname, '../../Source/domainset/cdn.conf');
const file = path.join(SOURCE_DIR, 'domainset/cdn.conf');
group('read file by line', () => {
bench('readFileByLine', () => processLineFromReadline(readFileByLine(file)));

View File

@@ -1,6 +1,7 @@
import path, { dirname } from 'node:path';
import fs from 'node:fs';
import fsp from 'node:fs/promises';
import { OUTPUT_CLASH_DIR, OUTPUT_SINGBOX_DIR, OUTPUT_SURGE_DIR } from '../constants/dir';
export const isTruthy = <T>(i: T | 0 | '' | false | null | undefined): i is T => !!i;
@@ -52,10 +53,6 @@ export const domainWildCardToRegex = (domain: string) => {
return result;
};
const OUTPUT_SURGE_DIR = path.resolve(__dirname, '../../List');
const OUTPUT_CLASH_DIR = path.resolve(__dirname, '../../Clash');
const OUTPUT_SINGBOX_DIR = path.resolve(__dirname, '../../sing-box');
export const output = (id: string, type: 'non_ip' | 'ip' | 'domainset') => {
return [
path.join(OUTPUT_SURGE_DIR, type, id + '.conf'),

View File

@@ -25,6 +25,11 @@ export const treeDir = async (rootPath: string): Promise<TreeTypeArray> => {
const walk = async (dir: string, node: TreeTypeArray, dirRelativeToRoot = ''): Promise<VoidOrVoidArray> => {
const promises: Array<Promise<VoidOrVoidArray>> = [];
for await (const child of await fsp.opendir(dir)) {
// Ignore hidden files
if (child.name[0] === '.' || child.name === 'CNAME') {
continue;
}
const childFullPath = child.parentPath + sep + child.name;
const childRelativeToRoot = dirRelativeToRoot + sep + child.name;

View File

@@ -2,8 +2,7 @@ import path from 'node:path';
import fsp from 'node:fs/promises';
import { fdir as Fdir } from 'fdir';
import { readFileByLine } from './lib/fetch-text-by-line';
const sourceDir = path.resolve(__dirname, '../Source');
import { SOURCE_DIR } from './constants/dir';
(async () => {
const promises: Array<Promise<unknown>> = [];
@@ -27,7 +26,7 @@ const sourceDir = path.resolve(__dirname, '../Source');
return true;
})
.crawl(sourceDir)
.crawl(SOURCE_DIR)
.withPromise();
for (let i = 0, len = paths.length; i < len; i++) {

View File

@@ -5,6 +5,7 @@ import { createTrie } from './lib/trie';
import { parse } from 'csv-parse/sync';
import { readFileByLine } from './lib/fetch-text-by-line';
import path from 'node:path';
import { SOURCE_DIR } from './constants/dir';
export const parseGfwList = async () => {
const whiteSet = new Set<string>();
@@ -105,8 +106,8 @@
};
await Promise.all([
runAgainstRuleset(path.resolve(__dirname, '../Source/non_ip/global.conf')),
runAgainstRuleset(path.resolve(__dirname, '../Source/non_ip/telegram.conf')),
runAgainstRuleset(path.join(SOURCE_DIR, 'non_ip/global.conf')),
runAgainstRuleset(path.join(SOURCE_DIR, 'non_ip/telegram.conf')),
runAgainstRuleset(path.resolve(__dirname, '../List/non_ip/stream.conf'))
]);