Chore: build infra changes

Migrate the build scripts from CommonJS __filename/__dirname to Bun's import.meta.path/import.meta.dir, pull the shared ruleset description header into Build/lib/constants.ts as SHARED_DESCRIPTION, add a try/catch around per-file processing in buildCommon, and simplify the TextLineStream and PolyfillTextDecoderStream helpers.

SukkaW 2023-11-23 22:09:01 +08:00
parent 265df07893
commit 101913e4f1
26 changed files with 187 additions and 191 deletions
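Most of the changes below are a mechanical swap of the CommonJS path globals for Bun's import.meta properties. A minimal sketch of the pattern, assuming the scripts run under the Bun runtime (where import.meta.dir and import.meta.path are available):

import path from 'path';

// Before (CommonJS): const out = path.resolve(__dirname, '../List/ip/reject.conf');
// After (Bun ESM): import.meta.dir is this module's directory, import.meta.path its absolute file path.
const out = path.resolve(import.meta.dir, '../List/ip/reject.conf');
console.log(import.meta.path, out);

The if (import.meta.main) guards that appear unchanged in several scripts serve the same role as a CommonJS require.main === module check.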

View File

@ -5,6 +5,7 @@ import { createRuleset } from './lib/create-file';
import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib/fetch-remote-text-by-line'; import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib/fetch-remote-text-by-line';
import { processLine } from './lib/process-line'; import { processLine } from './lib/process-line';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
const getBogusNxDomainIPs = async () => { const getBogusNxDomainIPs = async () => {
/** @type {string[]} */ /** @type {string[]} */
@ -22,12 +23,12 @@ const getBogusNxDomainIPs = async () => {
return result; return result;
}; };
export const buildAntiBogusDomain = task(__filename, async () => { export const buildAntiBogusDomain = task(import.meta.path, async () => {
const bogusIpPromise = getBogusNxDomainIPs(); const bogusIpPromise = getBogusNxDomainIPs();
/** @type {string[]} */ /** @type {string[]} */
const result = []; const result = [];
for await (const line of readFileByLine(path.resolve(__dirname, '../Source/ip/reject.conf'))) { for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/ip/reject.conf'))) {
if (line === '# --- [Anti Bogus Domain Replace Me] ---') { if (line === '# --- [Anti Bogus Domain Replace Me] ---') {
(await bogusIpPromise).forEach(rule => result.push(rule)); (await bogusIpPromise).forEach(rule => result.push(rule));
continue; continue;
@ -40,9 +41,7 @@ export const buildAntiBogusDomain = task(__filename, async () => {
} }
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
'This file contains known addresses that are hijacking NXDOMAIN results returned by DNS servers.', 'This file contains known addresses that are hijacking NXDOMAIN results returned by DNS servers.',
'', '',
@ -56,8 +55,8 @@ export const buildAntiBogusDomain = task(__filename, async () => {
new Date(), new Date(),
result, result,
'ruleset', 'ruleset',
path.resolve(__dirname, '../List/ip/reject.conf'), path.resolve(import.meta.dir, '../List/ip/reject.conf'),
path.resolve(__dirname, '../Clash/ip/reject.txt') path.resolve(import.meta.dir, '../Clash/ip/reject.txt')
)); ));
}); });

View File

@ -3,14 +3,13 @@ import path from 'path';
import { createRuleset } from './lib/create-file'; import { createRuleset } from './lib/create-file';
import { parseFelixDnsmasq } from './lib/parse-dnsmasq'; import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
export const buildAppleCdn = task(__filename, async () => { export const buildAppleCdn = task(import.meta.path, async () => {
const res = await parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf'); const res = await parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf');
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
'This file contains Apple\'s domains using their China mainland CDN servers.', 'This file contains Apple\'s domains using their China mainland CDN servers.',
'', '',
@ -28,8 +27,8 @@ export const buildAppleCdn = task(__filename, async () => {
new Date(), new Date(),
ruleset, ruleset,
'ruleset', 'ruleset',
path.resolve(__dirname, '../List/non_ip/apple_cdn.conf'), path.resolve(import.meta.dir, '../List/non_ip/apple_cdn.conf'),
path.resolve(__dirname, '../Clash/non_ip/apple_cdn.txt') path.resolve(import.meta.dir, '../Clash/non_ip/apple_cdn.txt')
), ),
...createRuleset( ...createRuleset(
'Sukka\'s Ruleset - Apple CDN', 'Sukka\'s Ruleset - Apple CDN',
@ -37,8 +36,8 @@ export const buildAppleCdn = task(__filename, async () => {
new Date(), new Date(),
domainset, domainset,
'domainset', 'domainset',
path.resolve(__dirname, '../List/domainset/apple_cdn.conf'), path.resolve(import.meta.dir, '../List/domainset/apple_cdn.conf'),
path.resolve(__dirname, '../Clash/domainset/apple_cdn.txt') path.resolve(import.meta.dir, '../Clash/domainset/apple_cdn.txt')
) )
]); ]);
}); });

View File

@ -4,8 +4,9 @@ import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib
import { createTrie } from './lib/trie'; import { createTrie } from './lib/trie';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { processLine } from './lib/process-line'; import { processLine } from './lib/process-line';
import { SHARED_DESCRIPTION } from './lib/constants';
const publicSuffixPath: string = path.resolve(__dirname, '../node_modules/.cache/public_suffix_list_dat.txt'); const publicSuffixPath: string = path.resolve(import.meta.dir, '../node_modules/.cache/public_suffix_list_dat.txt');
const getS3OSSDomains = async (): Promise<Set<string>> => { const getS3OSSDomains = async (): Promise<Set<string>> => {
const trie = createTrie(); const trie = createTrie();
@ -55,13 +56,13 @@ const getS3OSSDomains = async (): Promise<Set<string>> => {
return S3OSSDomains; return S3OSSDomains;
}; };
const buildCdnConf = task(__filename, async () => { const buildCdnConf = task(import.meta.path, async () => {
/** @type {string[]} */ /** @type {string[]} */
const cdnDomainsList: string[] = []; const cdnDomainsList: string[] = [];
const getS3OSSDomainsPromise: Promise<Set<string>> = getS3OSSDomains(); const getS3OSSDomainsPromise: Promise<Set<string>> = getS3OSSDomains();
for await (const l of readFileByLine(path.resolve(__dirname, '../Source/non_ip/cdn.conf'))) { for await (const l of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf'))) {
if (l === '# --- [AWS S3 Replace Me] ---') { if (l === '# --- [AWS S3 Replace Me] ---') {
(await getS3OSSDomainsPromise).forEach((domain: string) => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); }); (await getS3OSSDomainsPromise).forEach((domain: string) => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); });
continue; continue;
@ -73,9 +74,7 @@ const buildCdnConf = task(__filename, async () => {
} }
const description: string[] = [ const description: string[] = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
'This file contains object storage and static assets CDN domains.' 'This file contains object storage and static assets CDN domains.'
]; ];
@ -86,8 +85,8 @@ const buildCdnConf = task(__filename, async () => {
new Date(), new Date(),
cdnDomainsList, cdnDomainsList,
'ruleset', 'ruleset',
path.resolve(__dirname, '../List/non_ip/cdn.conf'), path.resolve(import.meta.dir, '../List/non_ip/cdn.conf'),
path.resolve(__dirname, '../Clash/non_ip/cdn.txt') path.resolve(import.meta.dir, '../Clash/non_ip/cdn.txt')
)); ));
}); });

View File

@ -10,7 +10,7 @@ const EXCLUDE_CIDRS = [
'223.120.0.0/15' '223.120.0.0/15'
]; ];
export const buildChnCidr = task(__filename, async () => { export const buildChnCidr = task(import.meta.path, async () => {
const [{ exclude }, cidr] = await Promise.all([ const [{ exclude }, cidr] = await Promise.all([
import('cidr-tools-wasm'), import('cidr-tools-wasm'),
processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')) processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt'))
@ -18,6 +18,7 @@ export const buildChnCidr = task(__filename, async () => {
const filteredCidr = exclude(cidr, EXCLUDE_CIDRS, true); const filteredCidr = exclude(cidr, EXCLUDE_CIDRS, true);
// Cannot use SHARED_DESCRIPTION here because this ruleset is published under a different license
const description = [ const description = [
'License: CC BY-SA 2.0', 'License: CC BY-SA 2.0',
'Homepage: https://ruleset.skk.moe', 'Homepage: https://ruleset.skk.moe',
@ -34,7 +35,7 @@ export const buildChnCidr = task(__filename, async () => {
new Date(), new Date(),
filteredCidr.map(i => `IP-CIDR,${i}`) filteredCidr.map(i => `IP-CIDR,${i}`)
), ),
pathResolve(__dirname, '../List/ip/china_ip.conf') pathResolve(import.meta.dir, '../List/ip/china_ip.conf')
), ),
compareAndWriteFile( compareAndWriteFile(
withBannerArray( withBannerArray(
@ -43,7 +44,7 @@ export const buildChnCidr = task(__filename, async () => {
new Date(), new Date(),
filteredCidr filteredCidr
), ),
pathResolve(__dirname, '../Clash/ip/china_ip.txt') pathResolve(import.meta.dir, '../Clash/ip/china_ip.txt')
) )
]); ]);
}); });

View File

@ -7,16 +7,17 @@ import { processLine } from './lib/process-line';
import { createRuleset } from './lib/create-file'; import { createRuleset } from './lib/create-file';
import { domainDeduper } from './lib/domain-deduper'; import { domainDeduper } from './lib/domain-deduper';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
const MAGIC_COMMAND_SKIP = '# $ custom_build_script'; const MAGIC_COMMAND_SKIP = '# $ custom_build_script';
const MAGIC_COMMAND_TITLE = '# $ meta_title '; const MAGIC_COMMAND_TITLE = '# $ meta_title ';
const MAGIC_COMMAND_DESCRIPTION = '# $ meta_description '; const MAGIC_COMMAND_DESCRIPTION = '# $ meta_description ';
const sourceDir = path.resolve(__dirname, '../Source'); const sourceDir = path.resolve(import.meta.dir, '../Source');
const outputSurgeDir = path.resolve(__dirname, '../List'); const outputSurgeDir = path.resolve(import.meta.dir, '../List');
const outputClashDir = path.resolve(__dirname, '../Clash'); const outputClashDir = path.resolve(import.meta.dir, '../Clash');
export const buildCommon = task(__filename, async () => { export const buildCommon = task(import.meta.path, async () => {
const promises: Promise<unknown>[] = []; const promises: Promise<unknown>[] = [];
const pw = new PathScurry(sourceDir); const pw = new PathScurry(sourceDir);
@ -49,30 +50,38 @@ if (import.meta.main) {
} }
const processFile = async (sourcePath: string) => { const processFile = async (sourcePath: string) => {
console.log('Processing', sourcePath);
const lines: string[] = []; const lines: string[] = [];
let title = ''; let title = '';
const descriptions: string[] = []; const descriptions: string[] = [];
for await (const line of readFileByLine(sourcePath)) {
if (line === MAGIC_COMMAND_SKIP) {
return;
}
if (line.startsWith(MAGIC_COMMAND_TITLE)) { try {
title = line.slice(MAGIC_COMMAND_TITLE.length).trim(); for await (const line of readFileByLine(sourcePath)) {
continue; if (line === MAGIC_COMMAND_SKIP) {
} return;
}
if (line.startsWith(MAGIC_COMMAND_DESCRIPTION)) { if (line.startsWith(MAGIC_COMMAND_TITLE)) {
descriptions.push(line.slice(MAGIC_COMMAND_DESCRIPTION.length).trim()); title = line.slice(MAGIC_COMMAND_TITLE.length).trim();
continue; continue;
} }
const l = processLine(line); if (line.startsWith(MAGIC_COMMAND_DESCRIPTION)) {
if (l) { descriptions.push(line.slice(MAGIC_COMMAND_DESCRIPTION.length).trim());
lines.push(l); continue;
}
const l = processLine(line);
if (l) {
lines.push(l);
}
} }
} catch (e) {
console.error('Error processing', sourcePath);
console.trace(e);
} }
return [title, descriptions, lines] as const; return [title, descriptions, lines] as const;
@ -85,9 +94,7 @@ async function transformDomainset(sourcePath: string, relativePath: string) {
const deduped = domainDeduper(lines); const deduped = domainDeduper(lines);
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...( ...(
descriptions.length descriptions.length
? ['', ...descriptions] ? ['', ...descriptions]
@ -115,9 +122,7 @@ async function transformRuleset(sourcePath: string, relativePath: string) {
const [title, descriptions, lines] = res; const [title, descriptions, lines] = res;
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...( ...(
descriptions.length descriptions.length
? ['', ...descriptions] ? ['', ...descriptions]

View File

@ -5,9 +5,10 @@ import { readFileByLine } from './lib/fetch-remote-text-by-line';
import { processLineFromReadline } from './lib/process-line'; import { processLineFromReadline } from './lib/process-line';
import { compareAndWriteFile, createRuleset } from './lib/create-file'; import { compareAndWriteFile, createRuleset } from './lib/create-file';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
export const buildDomesticRuleset = task(__filename, async () => { export const buildDomesticRuleset = task(import.meta.path, async () => {
const results = await processLineFromReadline(readFileByLine(path.resolve(__dirname, '../Source/non_ip/domestic.conf'))); const results = await processLineFromReadline(readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/domestic.conf')));
results.push( results.push(
...Object.entries(DOMESTICS) ...Object.entries(DOMESTICS)
@ -22,9 +23,7 @@ export const buildDomesticRuleset = task(__filename, async () => {
); );
const rulesetDescription = [ const rulesetDescription = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
'This file contains known addresses that are available in mainland China.'
]; ];
@ -36,8 +35,8 @@ export const buildDomesticRuleset = task(__filename, async () => {
new Date(), new Date(),
results, results,
'ruleset', 'ruleset',
path.resolve(__dirname, '../List/non_ip/domestic.conf'), path.resolve(import.meta.dir, '../List/non_ip/domestic.conf'),
path.resolve(__dirname, '../Clash/non_ip/domestic.txt') path.resolve(import.meta.dir, '../Clash/non_ip/domestic.txt')
), ),
compareAndWriteFile( compareAndWriteFile(
[ [
@ -53,7 +52,7 @@ export const buildDomesticRuleset = task(__filename, async () => {
]) ])
) )
], ],
path.resolve(__dirname, '../Modules/sukka_local_dns_mapping.sgmodule') path.resolve(import.meta.dir, '../Modules/sukka_local_dns_mapping.sgmodule')
) )
]); ]);
}); });

View File

@ -12,7 +12,7 @@ import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
const escapeRegExp = (string = '') => string.replaceAll(/[$()*+.?[\\\]^{|}]/g, '\\$&'); const escapeRegExp = (string = '') => string.replaceAll(/[$()*+.?[\\\]^{|}]/g, '\\$&');
export const buildInternalCDNDomains = task(__filename, async () => { export const buildInternalCDNDomains = task(import.meta.path, async () => {
const set = new Set<string>(); const set = new Set<string>();
const keywords = new Set(); const keywords = new Set();
@ -63,16 +63,16 @@ export const buildInternalCDNDomains = task(__filename, async () => {
}; };
await Promise.all([ await Promise.all([
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/cdn.conf')), processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/cdn.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global.conf')), processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/global.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global_plus.conf')), processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/global_plus.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/my_proxy.conf')), processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/my_proxy.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/stream.conf')), processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/stream.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/telegram.conf')), processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/telegram.conf')),
processLocalDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf')), processLocalDomainSet(path.resolve(import.meta.dir, '../List/domainset/cdn.conf')),
processLocalDomainSet(path.resolve(__dirname, '../List/domainset/download.conf')), processLocalDomainSet(path.resolve(import.meta.dir, '../List/domainset/download.conf')),
fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true }) fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true })
]); ]);
return compareAndWriteFile( return compareAndWriteFile(
@ -80,7 +80,7 @@ export const buildInternalCDNDomains = task(__filename, async () => {
...Array.from(set).sort(domainSorter).map(i => `SUFFIX,${i}`), ...Array.from(set).sort(domainSorter).map(i => `SUFFIX,${i}`),
...Array.from(keywords).sort().map(i => `REGEX,${i}`) ...Array.from(keywords).sort().map(i => `REGEX,${i}`)
], ],
path.resolve(__dirname, '../List/internal/cdn.txt') path.resolve(import.meta.dir, '../List/internal/cdn.txt')
); );
}); });

View File

@ -4,15 +4,15 @@ import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { compareAndWriteFile } from './lib/create-file'; import { compareAndWriteFile } from './lib/create-file';
export const buildInternalChnDomains = task(__filename, async () => { export const buildInternalChnDomains = task(import.meta.path, async () => {
const [result] = await Promise.all([ const [result] = await Promise.all([
parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'), parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'),
fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true }) fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true })
]); ]);
return compareAndWriteFile( return compareAndWriteFile(
result.map(line => `SUFFIX,${line}`), result.map(line => `SUFFIX,${line}`),
path.resolve(__dirname, '../List/internal/accelerated-china-domains.txt') path.resolve(import.meta.dir, '../List/internal/accelerated-china-domains.txt')
); );
}); });

View File

@ -22,11 +22,11 @@ const RESERVED_IPV4_CIDR = [
'240.0.0.0/4' '240.0.0.0/4'
]; ];
export const buildInternalReverseChnCIDR = task(__filename, async () => { export const buildInternalReverseChnCIDR = task(import.meta.path, async () => {
const [{ exclude }, cidr] = await Promise.all([ const [{ exclude }, cidr] = await Promise.all([
import('cidr-tools-wasm'), import('cidr-tools-wasm'),
processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')), processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')),
fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true }) fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true })
]); ]);
const reversedCidr = exclude( const reversedCidr = exclude(
@ -40,7 +40,7 @@ export const buildInternalReverseChnCIDR = task(__filename, async () => {
true true
); );
return Bun.write(path.resolve(__dirname, '../List/internal/reversed-chn-cidr.txt'), `${reversedCidr.join('\n')}\n`); return Bun.write(path.resolve(import.meta.dir, '../List/internal/reversed-chn-cidr.txt'), `${reversedCidr.join('\n')}\n`);
}); });
if (import.meta.main) { if (import.meta.main) {

View File

@ -8,6 +8,7 @@ import createTrie from './lib/trie';
import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix'; import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
import { createCachedGorhillGetDomain } from './lib/cached-tld-parse'; import { createCachedGorhillGetDomain } from './lib/cached-tld-parse';
import * as tldts from 'tldts'; import * as tldts from 'tldts';
import { SHARED_DESCRIPTION } from './lib/constants';
const WHITELIST_DOMAIN = new Set([ const WHITELIST_DOMAIN = new Set([
'w3s.link', 'w3s.link',
@ -63,7 +64,7 @@ const BLACK_TLD = new Set([
'com.cn' 'com.cn'
]); ]);
export const buildPhishingDomainSet = task(__filename, async () => { export const buildPhishingDomainSet = task(import.meta.path, async () => {
const [{ black: domainSet }, gorhill] = await Promise.all([ const [{ black: domainSet }, gorhill] = await Promise.all([
processFilterRules( processFilterRules(
'https://curbengh.github.io/phishing-filter/phishing-filter-agh.txt', 'https://curbengh.github.io/phishing-filter/phishing-filter-agh.txt',
@ -162,9 +163,7 @@ export const buildPhishingDomainSet = task(__filename, async () => {
.sort(domainSorter)); .sort(domainSorter));
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
'The domainset supports enhanced phishing protection', 'The domainset supports enhanced phishing protection',
'Build from:', 'Build from:',
@ -177,8 +176,8 @@ export const buildPhishingDomainSet = task(__filename, async () => {
new Date(), new Date(),
results, results,
'domainset', 'domainset',
path.resolve(__dirname, '../List/domainset/reject_phishing.conf'), path.resolve(import.meta.dir, '../List/domainset/reject_phishing.conf'),
path.resolve(__dirname, '../Clash/domainset/reject_phishing.txt') path.resolve(import.meta.dir, '../Clash/domainset/reject_phishing.txt')
)); ));
}); });

View File

@ -3,8 +3,8 @@ import path from 'path';
import fsp from 'fs/promises' import fsp from 'fs/promises'
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
const rootPath = path.resolve(__dirname, '../'); const rootPath = path.resolve(import.meta.dir, '../');
const publicPath = path.resolve(__dirname, '../public'); const publicPath = path.resolve(import.meta.dir, '../public');
const folderAndFilesToBeDeployed = [ const folderAndFilesToBeDeployed = [
'Assets', 'Assets',
@ -15,7 +15,7 @@ const folderAndFilesToBeDeployed = [
'LICENSE' 'LICENSE'
]; ];
export const buildPublicHtml = task(__filename, async () => { export const buildPublicHtml = task(import.meta.path, async () => {
await fsp.mkdir(publicPath, { recursive: true }); await fsp.mkdir(publicPath, { recursive: true });
await Promise.all(folderAndFilesToBeDeployed.map(dir => fsp.cp( await Promise.all(folderAndFilesToBeDeployed.map(dir => fsp.cp(
path.resolve(rootPath, dir), path.resolve(rootPath, dir),

View File

@ -71,7 +71,7 @@ const REDIRECT = /** @type {const} */ ([
['googleajax.wp-china-yes.net/', 'https://ajax.googleapis.com/'] ['googleajax.wp-china-yes.net/', 'https://ajax.googleapis.com/']
]); ]);
export const buildRedirectModule = task(__filename, async () => { export const buildRedirectModule = task(import.meta.path, async () => {
const domains = Array.from(new Set(REDIRECT.map(([from]) => tldts.getHostname(from, { detectIp: false })))).filter(Boolean); const domains = Array.from(new Set(REDIRECT.map(([from]) => tldts.getHostname(from, { detectIp: false })))).filter(Boolean);
return compareAndWriteFile( return compareAndWriteFile(
@ -88,7 +88,7 @@ export const buildRedirectModule = task(__filename, async () => {
return `${src} ${to}$1 302`; return `${src} ${to}$1 302`;
}) })
], ],
path.resolve(__dirname, '../Modules/sukka_url_redirect.sgmodule') path.resolve(import.meta.dir, '../Modules/sukka_url_redirect.sgmodule')
); );
}); });

View File

@ -15,6 +15,7 @@ import { createDomainSorter } from './lib/stable-sort-domain';
import { traceSync, task } from './lib/trace-runner'; import { traceSync, task } from './lib/trace-runner';
import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix'; import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
import * as tldts from 'tldts'; import * as tldts from 'tldts';
import { SHARED_DESCRIPTION } from './lib/constants';
/** Whitelists */ /** Whitelists */
const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST); const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
@ -23,7 +24,7 @@ const domainKeywordsSet: Set<string> = new Set();
/** @type {Set<string>} Dedupe domains included by DOMAIN-SUFFIX */ /** @type {Set<string>} Dedupe domains included by DOMAIN-SUFFIX */
const domainSuffixSet: Set<string> = new Set(); const domainSuffixSet: Set<string> = new Set();
export const buildRejectDomainSet = task(__filename, async () => { export const buildRejectDomainSet = task(import.meta.path, async () => {
/** @type Set<string> */ /** @type Set<string> */
const domainSets: Set<string> = new Set(); const domainSets: Set<string> = new Set();
@ -97,7 +98,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
let previousSize = domainSets.size; let previousSize = domainSets.size;
console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`); console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`);
for await (const line of readFileByLine(path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'))) { for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/domainset/reject_sukka.conf'))) {
const l = processLine(line); const l = processLine(line);
if (l) { if (l) {
domainSets.add(l); domainSets.add(l);
@ -107,7 +108,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
previousSize = domainSets.size - previousSize; previousSize = domainSets.size - previousSize;
console.log(`Import ${previousSize} rules from reject_sukka.conf!`); console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
for await (const line of readFileByLine(path.resolve(__dirname, '../Source/non_ip/reject.conf'))) { for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/reject.conf'))) {
if (line.startsWith('DOMAIN-KEYWORD')) { if (line.startsWith('DOMAIN-KEYWORD')) {
const [, ...keywords] = line.split(','); const [, ...keywords] = line.split(',');
domainKeywordsSet.add(keywords.join(',').trim()); domainKeywordsSet.add(keywords.join(',').trim());
@ -117,7 +118,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
} }
} }
for await (const line of readFileByLine(path.resolve(__dirname, '../List/domainset/reject_phishing.conf'))) { for await (const line of readFileByLine(path.resolve(import.meta.dir, '../List/domainset/reject_phishing.conf'))) {
const l = processLine(line); const l = processLine(line);
if (l && l[0] === '.') { if (l && l[0] === '.') {
domainSuffixSet.add(l.slice(1)); domainSuffixSet.add(l.slice(1));
@ -196,9 +197,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
const domainset = traceSync('* Sort reject domainset', () => dudupedDominArray.sort(domainSorter)); const domainset = traceSync('* Sort reject domainset', () => dudupedDominArray.sort(domainSorter));
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining', 'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining',
'', '',
@ -214,17 +213,17 @@ export const buildRejectDomainSet = task(__filename, async () => {
new Date(), new Date(),
domainset, domainset,
'domainset', 'domainset',
path.resolve(__dirname, '../List/domainset/reject.conf'), path.resolve(import.meta.dir, '../List/domainset/reject.conf'),
path.resolve(__dirname, '../Clash/domainset/reject.txt') path.resolve(import.meta.dir, '../Clash/domainset/reject.txt')
), ),
compareAndWriteFile( compareAndWriteFile(
rejectDomainsStats.map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`), rejectDomainsStats.map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`),
path.resolve(__dirname, '../List/internal/reject-stats.txt') path.resolve(import.meta.dir, '../List/internal/reject-stats.txt')
), ),
// Copy reject_sukka.conf for backward compatibility // Copy reject_sukka.conf for backward compatibility
fsp.cp( fsp.cp(
path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'), path.resolve(import.meta.dir, '../Source/domainset/reject_sukka.conf'),
path.resolve(__dirname, '../List/domainset/reject_sukka.conf'), path.resolve(import.meta.dir, '../List/domainset/reject_sukka.conf'),
{ force: true, recursive: true } { force: true, recursive: true }
) )
]); ]);

View File

@ -7,6 +7,7 @@ import { Sema } from 'async-sema';
import * as tldts from 'tldts'; import * as tldts from 'tldts';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { fetchWithRetry } from './lib/fetch-retry'; import { fetchWithRetry } from './lib/fetch-retry';
import { SHARED_DESCRIPTION } from './lib/constants';
const s = new Sema(3); const s = new Sema(3);
@ -57,7 +58,7 @@ const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> =>
} }
}; };
export const buildSpeedtestDomainSet = task(__filename, async () => { export const buildSpeedtestDomainSet = task(import.meta.path, async () => {
/** @type {Set<string>} */ /** @type {Set<string>} */
const domains: Set<string> = new Set([ const domains: Set<string> = new Set([
'.speedtest.net', '.speedtest.net',
@ -135,9 +136,9 @@ export const buildSpeedtestDomainSet = task(__filename, async () => {
const deduped = domainDeduper(Array.from(domains)).sort(domainSorter); const deduped = domainDeduper(Array.from(domains)).sort(domainSorter);
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe', '',
'GitHub: https://github.com/SukkaW/Surge' 'This file contains common speedtest endpoints.'
]; ];
return Promise.all(createRuleset( return Promise.all(createRuleset(
@ -146,8 +147,8 @@ export const buildSpeedtestDomainSet = task(__filename, async () => {
new Date(), new Date(),
deduped, deduped,
'domainset', 'domainset',
path.resolve(__dirname, '../List/domainset/speedtest.conf'), path.resolve(import.meta.dir, '../List/domainset/speedtest.conf'),
path.resolve(__dirname, '../Clash/domainset/speedtest.txt') path.resolve(import.meta.dir, '../Clash/domainset/speedtest.txt')
)); ));
}); });

View File

@ -5,6 +5,7 @@ import path from 'path';
import { createRuleset } from './lib/create-file'; import { createRuleset } from './lib/create-file';
import { ALL, NORTH_AMERICA, EU, HK, TW, JP, KR } from '../Source/stream'; import { ALL, NORTH_AMERICA, EU, HK, TW, JP, KR } from '../Source/stream';
import { SHARED_DESCRIPTION } from './lib/constants';
const createRulesetForStreamService = (fileId: string, title: string, streamServices: import('../Source/stream').StreamService[]) => { const createRulesetForStreamService = (fileId: string, title: string, streamServices: import('../Source/stream').StreamService[]) => {
return [ return [
@ -12,25 +13,21 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
...createRuleset( ...createRuleset(
`Sukka's Ruleset - Stream Services: ${title}`, `Sukka's Ruleset - Stream Services: ${title}`,
[ [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
...streamServices.map((i: { name: any; }) => `- ${i.name}`) ...streamServices.map((i: { name: any; }) => `- ${i.name}`)
], ],
new Date(), new Date(),
streamServices.flatMap((i: { rules: any; }) => i.rules), streamServices.flatMap((i: { rules: any; }) => i.rules),
'ruleset', 'ruleset',
path.resolve(__dirname, `../List/non_ip/${fileId}.conf`), path.resolve(import.meta.dir, `../List/non_ip/${fileId}.conf`),
path.resolve(__dirname, `../Clash/non_ip/${fileId}.txt`) path.resolve(import.meta.dir, `../Clash/non_ip/${fileId}.txt`)
), ),
// IP // IP
...createRuleset( ...createRuleset(
`Sukka's Ruleset - Stream Services' IPs: ${title}`, `Sukka's Ruleset - Stream Services' IPs: ${title}`,
[ [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'', '',
...streamServices.map((i: { name: any; }) => `- ${i.name}`) ...streamServices.map((i: { name: any; }) => `- ${i.name}`)
], ],
@ -44,13 +41,13 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
: [] : []
)), )),
'ruleset', 'ruleset',
path.resolve(__dirname, `../List/ip/${fileId}.conf`), path.resolve(import.meta.dir, `../List/ip/${fileId}.conf`),
path.resolve(__dirname, `../Clash/ip/${fileId}.txt`) path.resolve(import.meta.dir, `../Clash/ip/${fileId}.txt`)
) )
]; ];
}; };
export const buildStreamService = task(__filename, async () => { export const buildStreamService = task(import.meta.path, async () => {
return Promise.all([ return Promise.all([
...createRulesetForStreamService('stream', 'All', ALL), ...createRulesetForStreamService('stream', 'All', ALL),
...createRulesetForStreamService('stream_us', 'North America', NORTH_AMERICA), ...createRulesetForStreamService('stream_us', 'North America', NORTH_AMERICA),

View File

@ -6,14 +6,14 @@ import { isIPv4, isIPv6 } from 'net';
import { processLine } from './lib/process-line'; import { processLine } from './lib/process-line';
import { createRuleset } from './lib/create-file'; import { createRuleset } from './lib/create-file';
import { task } from './lib/trace-runner'; import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
export const buildTelegramCIDR = task(__filename, async () => { export const buildTelegramCIDR = task(import.meta.path, async () => {
const resp = await fetchWithRetry('https://core.telegram.org/resources/cidr.txt', defaultRequestInit); const resp = await fetchWithRetry('https://core.telegram.org/resources/cidr.txt', defaultRequestInit);
const lastModified = resp.headers.get('last-modified'); const lastModified = resp.headers.get('last-modified');
const date = lastModified ? new Date(lastModified) : new Date(); const date = lastModified ? new Date(lastModified) : new Date();
/** @type {string[]} */ const results: string[] = [];
const results = [];
for await (const line of createReadlineInterfaceFromResponse(resp)) { for await (const line of createReadlineInterfaceFromResponse(resp)) {
const cidr = processLine(line); const cidr = processLine(line);
@ -33,9 +33,7 @@ export const buildTelegramCIDR = task(__filename, async () => {
} }
const description = [ const description = [
'License: AGPL 3.0', ...SHARED_DESCRIPTION,
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'Data from:', 'Data from:',
' - https://core.telegram.org/resources/cidr.txt' ' - https://core.telegram.org/resources/cidr.txt'
]; ];
@ -46,8 +44,8 @@ export const buildTelegramCIDR = task(__filename, async () => {
date, date,
results, results,
'ruleset', 'ruleset',
path.resolve(__dirname, '../List/ip/telegram.conf'), path.resolve(import.meta.dir, '../List/ip/telegram.conf'),
path.resolve(__dirname, '../Clash/ip/telegram.txt') path.resolve(import.meta.dir, '../Clash/ip/telegram.txt')
)); ));
}); });

View File

@ -13,12 +13,12 @@ import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
const IS_READING_BUILD_OUTPUT = 1 << 2; const IS_READING_BUILD_OUTPUT = 1 << 2;
const ALL_FILES_EXISTS = 1 << 3; const ALL_FILES_EXISTS = 1 << 3;
export const downloadPreviousBuild = task(__filename, async () => { export const downloadPreviousBuild = task(import.meta.path, async () => {
const buildOutputList: string[] = []; const buildOutputList: string[] = [];
let flag = 1 | ALL_FILES_EXISTS; let flag = 1 | ALL_FILES_EXISTS;
for await (const line of readFileByLine(path.resolve(__dirname, '../.gitignore'))) { for await (const line of readFileByLine(path.resolve(import.meta.dir, '../.gitignore'))) {
if (line === '# $ build output') { if (line === '# $ build output') {
flag = flag | IS_READING_BUILD_OUTPUT; flag = flag | IS_READING_BUILD_OUTPUT;
continue; continue;
@ -31,7 +31,7 @@ export const downloadPreviousBuild = task(__filename, async () => {
if (!isCI) { if (!isCI) {
// Bun.file().exists() doesn't check directory // Bun.file().exists() doesn't check directory
if (!fs.existsSync(path.join(__dirname, '..', line))) { if (!fs.existsSync(path.join(import.meta.dir, '..', line))) {
flag = flag & ~ALL_FILES_EXISTS; flag = flag & ~ALL_FILES_EXISTS;
} }
} }
@ -74,7 +74,7 @@ export const downloadPreviousBuild = task(__filename, async () => {
const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master' + path.sep, ''); const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master' + path.sep, '');
const targetPath = path.join(__dirname, '..', relativeEntryPath); const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);
await fsp.mkdir(path.dirname(targetPath), { recursive: true }); await fsp.mkdir(path.dirname(targetPath), { recursive: true });
const targetFile = Bun.file(targetPath); const targetFile = Bun.file(targetPath);
@ -97,8 +97,8 @@ export const downloadPreviousBuild = task(__filename, async () => {
); );
}); });
export const downloadPublicSuffixList = task(__filename, async () => { export const downloadPublicSuffixList = task(import.meta.path, async () => {
const publicSuffixDir = path.resolve(__dirname, '../node_modules/.cache'); const publicSuffixDir = path.resolve(import.meta.dir, '../node_modules/.cache');
const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt'); const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt');
const [resp] = await Promise.all([ const [resp] = await Promise.all([

Build/lib/constants.ts (new file, +5 lines)
View File

@ -0,0 +1,5 @@
export const SHARED_DESCRIPTION = [
  'License: AGPL 3.0',
  'Homepage: https://ruleset.skk.moe',
  'GitHub: https://github.com/SukkaW/Surge',
] as const;
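Every build script that previously repeated these three header lines now spreads the constant and appends its own description, as buildAntiBogusDomain does in the first hunk above; a sketch of the consuming side:

import { SHARED_DESCRIPTION } from './lib/constants';

const description = [
  ...SHARED_DESCRIPTION, // License / Homepage / GitHub lines
  '',
  'This file contains known addresses that are hijacking NXDOMAIN results returned by DNS servers.'
];

The one exception is the chnroutes-based china_ip ruleset, which keeps its own inline header because the upstream data is CC BY-SA 2.0 rather than AGPL 3.0.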

View File

@ -10,7 +10,7 @@ export function readFileByLine(file: string | BunFile) {
return file.stream().pipeThrough(new PolyfillTextDecoderStream()).pipeThrough(new TextLineStream()); return file.stream().pipeThrough(new PolyfillTextDecoderStream()).pipeThrough(new TextLineStream());
} }
export async function createReadlineInterfaceFromResponse(resp: Response) { export function createReadlineInterfaceFromResponse(resp: Response) {
if (!resp.body) { if (!resp.body) {
throw new Error('Failed to fetch remote text'); throw new Error('Failed to fetch remote text');
} }
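readFileByLine now returns a ReadableStream<string> of lines (Bun's ReadableStream is async-iterable), and createReadlineInterfaceFromResponse no longer needs to be async; callers keep using for await...of unchanged. A small usage sketch in the style of the build scripts:

import path from 'path';
import { readFileByLine } from './lib/fetch-remote-text-by-line';
import { processLine } from './lib/process-line';

// Collect the meaningful lines of a source file; processLine returns null for lines to skip (comments etc.).
const lines: string[] = [];
for await (const raw of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf'))) {
  const line = processLine(raw);
  if (line) lines.push(line);
}
console.log(`${lines.length} rules read`);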

View File

@ -4,7 +4,7 @@ import { traceAsync } from './trace-runner';
import { defaultRequestInit, fetchWithRetry } from './fetch-retry'; import { defaultRequestInit, fetchWithRetry } from './fetch-retry';
import type { PublicSuffixList } from 'gorhill-publicsuffixlist'; import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
const publicSuffixPath = path.resolve(__dirname, '../../node_modules/.cache/public_suffix_list_dat.txt'); const publicSuffixPath = path.resolve(import.meta.dir, '../../node_modules/.cache/public_suffix_list_dat.txt');
const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => { const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => {
const customFetch = async (url: string | URL) => Bun.file(url); const customFetch = async (url: string | URL) => Bun.file(url);

View File

@ -23,7 +23,7 @@ export const processLine = (line: string): string | null => {
return trimmed; return trimmed;
}; };
export const processLineFromReadline = async (rl: AsyncGenerator<string>): Promise<string[]> => { export const processLineFromReadline = async (rl: AsyncGenerator<string> | ReadableStream<string>): Promise<string[]> => {
const res: string[] = []; const res: string[] = [];
for await (const line of rl) { for await (const line of rl) {
const l: string | null = processLine(line); const l: string | null = processLine(line);

View File

@ -13,7 +13,6 @@
// limitations under the License. // limitations under the License.
// Polyfill for TextEncoderStream and TextDecoderStream // Polyfill for TextEncoderStream and TextDecoderStream
// Modified by Sukka (https://skk.moe) to increase compatibility and performance with Bun. // Modified by Sukka (https://skk.moe) to increase compatibility and performance with Bun.
export class PolyfillTextDecoderStream extends TransformStream<Uint8Array, string> { export class PolyfillTextDecoderStream extends TransformStream<Uint8Array, string> {
@ -23,10 +22,7 @@ export class PolyfillTextDecoderStream extends TransformStream<Uint8Array, strin
constructor( constructor(
encoding: Encoding = 'utf-8', encoding: Encoding = 'utf-8',
{ { fatal = false, ignoreBOM = false }: ConstructorParameters<typeof TextDecoder>[1] = {},
fatal = false,
ignoreBOM = false,
}: ConstructorParameters<typeof TextDecoder>[1] = {},
) { ) {
const decoder = new TextDecoder(encoding, { fatal, ignoreBOM }); const decoder = new TextDecoder(encoding, { fatal, ignoreBOM });
super({ super({

View File

@ -19,57 +19,55 @@ interface TextLineStreamOptions {
* ``` * ```
*/ */
export class TextLineStream extends TransformStream<string, string> { export class TextLineStream extends TransformStream<string, string> {
private __allowCR: boolean;
private __buf = ''; private __buf = '';
constructor(options?: TextLineStreamOptions) { constructor(options?: TextLineStreamOptions) {
const allowCR = options?.allowCR ?? false;
super({ super({
transform: (chunk, controller) => this.handle(chunk, controller), transform: (chunk, controller) => {
chunk = this.__buf + chunk;
for (; ;) {
const lfIndex = chunk.indexOf('\n');
if (allowCR) {
const crIndex = chunk.indexOf('\r');
if (
crIndex !== -1 && crIndex !== (chunk.length - 1) &&
(lfIndex === -1 || (lfIndex - 1) > crIndex)
) {
controller.enqueue(chunk.slice(0, crIndex));
chunk = chunk.slice(crIndex + 1);
continue;
}
}
if (lfIndex !== -1) {
let crOrLfIndex = lfIndex;
if (chunk[lfIndex - 1] === '\r') {
crOrLfIndex--;
}
controller.enqueue(chunk.slice(0, crOrLfIndex));
chunk = chunk.slice(lfIndex + 1);
continue;
}
break;
}
this.__buf = chunk;
},
flush: (controller) => { flush: (controller) => {
if (this.__buf.length > 0) { if (this.__buf.length > 0) {
if ( if (allowCR && this.__buf[this.__buf.length - 1] === '\r') {
this.__allowCR && controller.enqueue(this.__buf.slice(0, -1));
this.__buf[this.__buf.length - 1] === '\r' } else {
) controller.enqueue(this.__buf.slice(0, -1)); controller.enqueue(this.__buf);
else controller.enqueue(this.__buf); };
} }
}, },
}); });
this.__allowCR = options?.allowCR ?? false;
}
private handle(chunk: string, controller: TransformStreamDefaultController<string>) {
chunk = this.__buf + chunk;
for (;;) {
const lfIndex = chunk.indexOf('\n');
if (this.__allowCR) {
const crIndex = chunk.indexOf('\r');
if (
crIndex !== -1 && crIndex !== (chunk.length - 1) &&
(lfIndex === -1 || (lfIndex - 1) > crIndex)
) {
controller.enqueue(chunk.slice(0, crIndex));
chunk = chunk.slice(crIndex + 1);
continue;
}
}
if (lfIndex !== -1) {
let crOrLfIndex = lfIndex;
if (chunk[lfIndex - 1] === '\r') {
crOrLfIndex--;
}
controller.enqueue(chunk.slice(0, crOrLfIndex));
chunk = chunk.slice(lfIndex + 1);
continue;
}
break;
}
this.__buf = chunk;
} }
} }
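The rewrite moves the line-splitting loop from a private handle() method into the transform callback and captures allowCR in constructor scope instead of a class field; the observable behavior is unchanged. A small usage sketch (the import path is assumed), showing how partial lines are buffered across chunks:

import { TextLineStream } from './text-line-stream'; // hypothetical module path

const input = new ReadableStream<string>({
  start(controller) {
    controller.enqueue('foo\nba');  // first chunk ends mid-line
    controller.enqueue('r\r\nbaz'); // CRLF is collapsed; the trailing text is flushed on close
    controller.close();
  }
});

for await (const line of input.pipeThrough(new TextLineStream())) {
  console.log(line); // "foo", then "bar", then "baz"
}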

View File

@ -24,8 +24,8 @@ export interface TaskResult {
readonly taskName: string; readonly taskName: string;
} }
const task = <T>(__filename: string, fn: () => Promise<T>, customname: string | null = null) => { const task = <T>(importMetaPath: string, fn: () => Promise<T>, customname: string | null = null) => {
const taskName = customname ?? path.basename(__filename, path.extname(__filename)); const taskName = customname ?? path.basename(importMetaPath, path.extname(importMetaPath));
return async () => { return async () => {
console.log(`🏃 [${taskName}] Start executing`); console.log(`🏃 [${taskName}] Start executing`);
const start = performance.now(); const start = performance.now();
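task() only uses the path argument to derive a display name (the file's basename without extension), so renaming the parameter from __filename to importMetaPath is purely cosmetic and call sites just pass import.meta.path. A sketch of the calling convention, with buildExample as a placeholder name:

import { task } from './lib/trace-runner';

// A file named build-example.ts yields the task name "build-example" in the '🏃 [...] Start executing' log line.
export const buildExample = task(import.meta.path, async () => {
  // actual build work goes here
});

if (import.meta.main) {
  buildExample();
}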

View File

@ -15,7 +15,7 @@ const SPECIAL_SUFFIXES = new Set([
]); ]);
const validateDomainSet = async (filePath: string) => { const validateDomainSet = async (filePath: string) => {
for await (const l of readFileByLine(path.resolve(__dirname, '../List/domainset', filePath))) { for await (const l of readFileByLine(path.resolve(import.meta.dir, '../List/domainset', filePath))) {
// starts with # // starts with #
const line = processLine(l); const line = processLine(l);
if (!line) { if (!line) {
@ -38,7 +38,7 @@ const validateDomainSet = async (filePath: string) => {
const _validateRuleset = async (filePath: string) => { const _validateRuleset = async (filePath: string) => {
console.log(`[${filePath}]`); console.log(`[${filePath}]`);
for await (const l of readFileByLine(path.resolve(__dirname, '../List/non_ip', filePath))) { for await (const l of readFileByLine(path.resolve(import.meta.dir, '../List/non_ip', filePath))) {
// starts with # // starts with #
const line = processLine(l); const line = processLine(l);
if (!line) { if (!line) {
@ -58,13 +58,13 @@ const _validateRuleset = async (filePath: string) => {
} }
}; };
export const validate = task(__filename, async () => { export const validate = task(import.meta.path, async () => {
// const [domainsetFiles, _rulesetFiles] = await Promise.all([ // const [domainsetFiles, _rulesetFiles] = await Promise.all([
// listDir(path.resolve(__dirname, '../List/domainset')), // listDir(path.resolve(import.meta.dir, '../List/domainset')),
// listDir(path.resolve(__dirname, '../List/non_ip')) // listDir(path.resolve(import.meta.dir, '../List/non_ip'))
// ]); // ]);
return Promise.all([ return Promise.all([
listDir(path.resolve(__dirname, '../List/domainset')) listDir(path.resolve(import.meta.dir, '../List/domainset'))
.then(domainsetFiles => Promise.all(domainsetFiles.map(file => validateDomainSet(file)))) .then(domainsetFiles => Promise.all(domainsetFiles.map(file => validateDomainSet(file))))
// rulesetFiles.map(file => validateRuleset(file)) // rulesetFiles.map(file => validateRuleset(file))
]); ]);

View File

@ -145,6 +145,7 @@ mirror.navercorp.com
fedorapeople.org fedorapeople.org
.cloudflaremirrors.com .cloudflaremirrors.com
.repo.jing.rocks .repo.jing.rocks
mirrors.edge.kernel.org
# WhatPulse # WhatPulse
releases.whatpulse.org releases.whatpulse.org
# GIMP # GIMP