Chore: build infra changes

This commit is contained in:
SukkaW 2023-11-23 22:09:01 +08:00
parent 265df07893
commit 101913e4f1
26 changed files with 187 additions and 191 deletions

View File

@ -5,6 +5,7 @@ import { createRuleset } from './lib/create-file';
import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib/fetch-remote-text-by-line';
import { processLine } from './lib/process-line';
import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
const getBogusNxDomainIPs = async () => {
/** @type {string[]} */
@ -22,12 +23,12 @@ const getBogusNxDomainIPs = async () => {
return result;
};
export const buildAntiBogusDomain = task(__filename, async () => {
export const buildAntiBogusDomain = task(import.meta.path, async () => {
const bogusIpPromise = getBogusNxDomainIPs();
/** @type {string[]} */
const result = [];
for await (const line of readFileByLine(path.resolve(__dirname, '../Source/ip/reject.conf'))) {
for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/ip/reject.conf'))) {
if (line === '# --- [Anti Bogus Domain Replace Me] ---') {
(await bogusIpPromise).forEach(rule => result.push(rule));
continue;
@ -40,9 +41,7 @@ export const buildAntiBogusDomain = task(__filename, async () => {
}
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
'This file contains known addresses that are hijacking NXDOMAIN results returned by DNS servers.',
'',
@ -56,8 +55,8 @@ export const buildAntiBogusDomain = task(__filename, async () => {
new Date(),
result,
'ruleset',
path.resolve(__dirname, '../List/ip/reject.conf'),
path.resolve(__dirname, '../Clash/ip/reject.txt')
path.resolve(import.meta.dir, '../List/ip/reject.conf'),
path.resolve(import.meta.dir, '../Clash/ip/reject.txt')
));
});

View File

@ -3,14 +3,13 @@ import path from 'path';
import { createRuleset } from './lib/create-file';
import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
export const buildAppleCdn = task(__filename, async () => {
export const buildAppleCdn = task(import.meta.path, async () => {
const res = await parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf');
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
'This file contains Apple\'s domains using their China mainland CDN servers.',
'',
@ -28,8 +27,8 @@ export const buildAppleCdn = task(__filename, async () => {
new Date(),
ruleset,
'ruleset',
path.resolve(__dirname, '../List/non_ip/apple_cdn.conf'),
path.resolve(__dirname, '../Clash/non_ip/apple_cdn.txt')
path.resolve(import.meta.dir, '../List/non_ip/apple_cdn.conf'),
path.resolve(import.meta.dir, '../Clash/non_ip/apple_cdn.txt')
),
...createRuleset(
'Sukka\'s Ruleset - Apple CDN',
@ -37,8 +36,8 @@ export const buildAppleCdn = task(__filename, async () => {
new Date(),
domainset,
'domainset',
path.resolve(__dirname, '../List/domainset/apple_cdn.conf'),
path.resolve(__dirname, '../Clash/domainset/apple_cdn.txt')
path.resolve(import.meta.dir, '../List/domainset/apple_cdn.conf'),
path.resolve(import.meta.dir, '../Clash/domainset/apple_cdn.txt')
)
]);
});

View File

@ -4,8 +4,9 @@ import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib
import { createTrie } from './lib/trie';
import { task } from './lib/trace-runner';
import { processLine } from './lib/process-line';
import { SHARED_DESCRIPTION } from './lib/constants';
const publicSuffixPath: string = path.resolve(__dirname, '../node_modules/.cache/public_suffix_list_dat.txt');
const publicSuffixPath: string = path.resolve(import.meta.dir, '../node_modules/.cache/public_suffix_list_dat.txt');
const getS3OSSDomains = async (): Promise<Set<string>> => {
const trie = createTrie();
@ -55,13 +56,13 @@ const getS3OSSDomains = async (): Promise<Set<string>> => {
return S3OSSDomains;
};
const buildCdnConf = task(__filename, async () => {
const buildCdnConf = task(import.meta.path, async () => {
/** @type {string[]} */
const cdnDomainsList: string[] = [];
const getS3OSSDomainsPromise: Promise<Set<string>> = getS3OSSDomains();
for await (const l of readFileByLine(path.resolve(__dirname, '../Source/non_ip/cdn.conf'))) {
for await (const l of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf'))) {
if (l === '# --- [AWS S3 Replace Me] ---') {
(await getS3OSSDomainsPromise).forEach((domain: string) => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); });
continue;
@ -73,9 +74,7 @@ const buildCdnConf = task(__filename, async () => {
}
const description: string[] = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
'This file contains object storage and static assets CDN domains.'
];
@ -86,8 +85,8 @@ const buildCdnConf = task(__filename, async () => {
new Date(),
cdnDomainsList,
'ruleset',
path.resolve(__dirname, '../List/non_ip/cdn.conf'),
path.resolve(__dirname, '../Clash/non_ip/cdn.txt')
path.resolve(import.meta.dir, '../List/non_ip/cdn.conf'),
path.resolve(import.meta.dir, '../Clash/non_ip/cdn.txt')
));
});

View File

@ -10,7 +10,7 @@ const EXCLUDE_CIDRS = [
'223.120.0.0/15'
];
export const buildChnCidr = task(__filename, async () => {
export const buildChnCidr = task(import.meta.path, async () => {
const [{ exclude }, cidr] = await Promise.all([
import('cidr-tools-wasm'),
processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt'))
@ -18,6 +18,7 @@ export const buildChnCidr = task(__filename, async () => {
const filteredCidr = exclude(cidr, EXCLUDE_CIDRS, true);
// Cannot use SHARED_DESCRIPTION here because this file is published under a different license
const description = [
'License: CC BY-SA 2.0',
'Homepage: https://ruleset.skk.moe',
@ -34,7 +35,7 @@ export const buildChnCidr = task(__filename, async () => {
new Date(),
filteredCidr.map(i => `IP-CIDR,${i}`)
),
pathResolve(__dirname, '../List/ip/china_ip.conf')
pathResolve(import.meta.dir, '../List/ip/china_ip.conf')
),
compareAndWriteFile(
withBannerArray(
@ -43,7 +44,7 @@ export const buildChnCidr = task(__filename, async () => {
new Date(),
filteredCidr
),
pathResolve(__dirname, '../Clash/ip/china_ip.txt')
pathResolve(import.meta.dir, '../Clash/ip/china_ip.txt')
)
]);
});

View File

@ -7,16 +7,17 @@ import { processLine } from './lib/process-line';
import { createRuleset } from './lib/create-file';
import { domainDeduper } from './lib/domain-deduper';
import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
const MAGIC_COMMAND_SKIP = '# $ custom_build_script';
const MAGIC_COMMAND_TITLE = '# $ meta_title ';
const MAGIC_COMMAND_DESCRIPTION = '# $ meta_description ';
const sourceDir = path.resolve(__dirname, '../Source');
const outputSurgeDir = path.resolve(__dirname, '../List');
const outputClashDir = path.resolve(__dirname, '../Clash');
const sourceDir = path.resolve(import.meta.dir, '../Source');
const outputSurgeDir = path.resolve(import.meta.dir, '../List');
const outputClashDir = path.resolve(import.meta.dir, '../Clash');
export const buildCommon = task(__filename, async () => {
export const buildCommon = task(import.meta.path, async () => {
const promises: Promise<unknown>[] = [];
const pw = new PathScurry(sourceDir);
@ -49,30 +50,38 @@ if (import.meta.main) {
}
const processFile = async (sourcePath: string) => {
console.log('Processing', sourcePath);
const lines: string[] = [];
let title = '';
const descriptions: string[] = [];
for await (const line of readFileByLine(sourcePath)) {
if (line === MAGIC_COMMAND_SKIP) {
return;
}
if (line.startsWith(MAGIC_COMMAND_TITLE)) {
title = line.slice(MAGIC_COMMAND_TITLE.length).trim();
continue;
}
try {
for await (const line of readFileByLine(sourcePath)) {
if (line === MAGIC_COMMAND_SKIP) {
return;
}
if (line.startsWith(MAGIC_COMMAND_DESCRIPTION)) {
descriptions.push(line.slice(MAGIC_COMMAND_DESCRIPTION.length).trim());
continue;
}
if (line.startsWith(MAGIC_COMMAND_TITLE)) {
title = line.slice(MAGIC_COMMAND_TITLE.length).trim();
continue;
}
const l = processLine(line);
if (l) {
lines.push(l);
if (line.startsWith(MAGIC_COMMAND_DESCRIPTION)) {
descriptions.push(line.slice(MAGIC_COMMAND_DESCRIPTION.length).trim());
continue;
}
const l = processLine(line);
if (l) {
lines.push(l);
}
}
} catch (e) {
console.error('Error processing', sourcePath);
console.trace(e);
}
return [title, descriptions, lines] as const;
@ -85,9 +94,7 @@ async function transformDomainset(sourcePath: string, relativePath: string) {
const deduped = domainDeduper(lines);
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
...(
descriptions.length
? ['', ...descriptions]
@ -115,9 +122,7 @@ async function transformRuleset(sourcePath: string, relativePath: string) {
const [title, descriptions, lines] = res;
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
...(
descriptions.length
? ['', ...descriptions]

View File

@ -5,9 +5,10 @@ import { readFileByLine } from './lib/fetch-remote-text-by-line';
import { processLineFromReadline } from './lib/process-line';
import { compareAndWriteFile, createRuleset } from './lib/create-file';
import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
export const buildDomesticRuleset = task(__filename, async () => {
const results = await processLineFromReadline(readFileByLine(path.resolve(__dirname, '../Source/non_ip/domestic.conf')));
export const buildDomesticRuleset = task(import.meta.path, async () => {
const results = await processLineFromReadline(readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/domestic.conf')));
results.push(
...Object.entries(DOMESTICS)
@ -22,9 +23,7 @@ export const buildDomesticRuleset = task(__filename, async () => {
);
const rulesetDescription = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
'This file contains known addresses that are avaliable in the Mainland China.'
];
@ -36,8 +35,8 @@ export const buildDomesticRuleset = task(__filename, async () => {
new Date(),
results,
'ruleset',
path.resolve(__dirname, '../List/non_ip/domestic.conf'),
path.resolve(__dirname, '../Clash/non_ip/domestic.txt')
path.resolve(import.meta.dir, '../List/non_ip/domestic.conf'),
path.resolve(import.meta.dir, '../Clash/non_ip/domestic.txt')
),
compareAndWriteFile(
[
@ -53,7 +52,7 @@ export const buildDomesticRuleset = task(__filename, async () => {
])
)
],
path.resolve(__dirname, '../Modules/sukka_local_dns_mapping.sgmodule')
path.resolve(import.meta.dir, '../Modules/sukka_local_dns_mapping.sgmodule')
)
]);
});

View File

@ -12,7 +12,7 @@ import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
const escapeRegExp = (string = '') => string.replaceAll(/[$()*+.?[\\\]^{|}]/g, '\\$&');
export const buildInternalCDNDomains = task(__filename, async () => {
export const buildInternalCDNDomains = task(import.meta.path, async () => {
const set = new Set<string>();
const keywords = new Set();
@ -63,16 +63,16 @@ export const buildInternalCDNDomains = task(__filename, async () => {
};
await Promise.all([
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/cdn.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global_plus.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/my_proxy.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/stream.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/telegram.conf')),
processLocalDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf')),
processLocalDomainSet(path.resolve(__dirname, '../List/domainset/download.conf')),
processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/cdn.conf')),
processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/global.conf')),
processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/global_plus.conf')),
processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/my_proxy.conf')),
processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/stream.conf')),
processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/telegram.conf')),
processLocalDomainSet(path.resolve(import.meta.dir, '../List/domainset/cdn.conf')),
processLocalDomainSet(path.resolve(import.meta.dir, '../List/domainset/download.conf')),
fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true })
fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true })
]);
return compareAndWriteFile(
@ -80,7 +80,7 @@ export const buildInternalCDNDomains = task(__filename, async () => {
...Array.from(set).sort(domainSorter).map(i => `SUFFIX,${i}`),
...Array.from(keywords).sort().map(i => `REGEX,${i}`)
],
path.resolve(__dirname, '../List/internal/cdn.txt')
path.resolve(import.meta.dir, '../List/internal/cdn.txt')
);
});

View File

@ -4,15 +4,15 @@ import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
import { task } from './lib/trace-runner';
import { compareAndWriteFile } from './lib/create-file';
export const buildInternalChnDomains = task(__filename, async () => {
export const buildInternalChnDomains = task(import.meta.path, async () => {
const [result] = await Promise.all([
parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'),
fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true })
fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true })
]);
return compareAndWriteFile(
result.map(line => `SUFFIX,${line}`),
path.resolve(__dirname, '../List/internal/accelerated-china-domains.txt')
path.resolve(import.meta.dir, '../List/internal/accelerated-china-domains.txt')
);
});

View File

@ -22,11 +22,11 @@ const RESERVED_IPV4_CIDR = [
'240.0.0.0/4'
];
export const buildInternalReverseChnCIDR = task(__filename, async () => {
export const buildInternalReverseChnCIDR = task(import.meta.path, async () => {
const [{ exclude }, cidr] = await Promise.all([
import('cidr-tools-wasm'),
processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')),
fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true })
fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true })
]);
const reversedCidr = exclude(
@ -40,7 +40,7 @@ export const buildInternalReverseChnCIDR = task(__filename, async () => {
true
);
return Bun.write(path.resolve(__dirname, '../List/internal/reversed-chn-cidr.txt'), `${reversedCidr.join('\n')}\n`);
return Bun.write(path.resolve(import.meta.dir, '../List/internal/reversed-chn-cidr.txt'), `${reversedCidr.join('\n')}\n`);
});
if (import.meta.main) {

View File

@ -8,6 +8,7 @@ import createTrie from './lib/trie';
import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
import { createCachedGorhillGetDomain } from './lib/cached-tld-parse';
import * as tldts from 'tldts';
import { SHARED_DESCRIPTION } from './lib/constants';
const WHITELIST_DOMAIN = new Set([
'w3s.link',
@ -63,7 +64,7 @@ const BLACK_TLD = new Set([
'com.cn'
]);
export const buildPhishingDomainSet = task(__filename, async () => {
export const buildPhishingDomainSet = task(import.meta.path, async () => {
const [{ black: domainSet }, gorhill] = await Promise.all([
processFilterRules(
'https://curbengh.github.io/phishing-filter/phishing-filter-agh.txt',
@ -162,9 +163,7 @@ export const buildPhishingDomainSet = task(__filename, async () => {
.sort(domainSorter));
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
'The domainset supports enhanced phishing protection',
'Build from:',
@ -177,8 +176,8 @@ export const buildPhishingDomainSet = task(__filename, async () => {
new Date(),
results,
'domainset',
path.resolve(__dirname, '../List/domainset/reject_phishing.conf'),
path.resolve(__dirname, '../Clash/domainset/reject_phishing.txt')
path.resolve(import.meta.dir, '../List/domainset/reject_phishing.conf'),
path.resolve(import.meta.dir, '../Clash/domainset/reject_phishing.txt')
));
});

View File

@ -3,8 +3,8 @@ import path from 'path';
import fsp from 'fs/promises'
import { task } from './lib/trace-runner';
const rootPath = path.resolve(__dirname, '../');
const publicPath = path.resolve(__dirname, '../public');
const rootPath = path.resolve(import.meta.dir, '../');
const publicPath = path.resolve(import.meta.dir, '../public');
const folderAndFilesToBeDeployed = [
'Assets',
@ -15,7 +15,7 @@ const folderAndFilesToBeDeployed = [
'LICENSE'
];
export const buildPublicHtml = task(__filename, async () => {
export const buildPublicHtml = task(import.meta.path, async () => {
await fsp.mkdir(publicPath, { recursive: true });
await Promise.all(folderAndFilesToBeDeployed.map(dir => fsp.cp(
path.resolve(rootPath, dir),

View File

@ -71,7 +71,7 @@ const REDIRECT = /** @type {const} */ ([
['googleajax.wp-china-yes.net/', 'https://ajax.googleapis.com/']
]);
export const buildRedirectModule = task(__filename, async () => {
export const buildRedirectModule = task(import.meta.path, async () => {
const domains = Array.from(new Set(REDIRECT.map(([from]) => tldts.getHostname(from, { detectIp: false })))).filter(Boolean);
return compareAndWriteFile(
@ -88,7 +88,7 @@ export const buildRedirectModule = task(__filename, async () => {
return `${src} ${to}$1 302`;
})
],
path.resolve(__dirname, '../Modules/sukka_url_redirect.sgmodule')
path.resolve(import.meta.dir, '../Modules/sukka_url_redirect.sgmodule')
);
});

View File

@ -15,6 +15,7 @@ import { createDomainSorter } from './lib/stable-sort-domain';
import { traceSync, task } from './lib/trace-runner';
import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
import * as tldts from 'tldts';
import { SHARED_DESCRIPTION } from './lib/constants';
/** Whitelists */
const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
@ -23,7 +24,7 @@ const domainKeywordsSet: Set<string> = new Set();
/** @type {Set<string>} Dedupe domains included by DOMAIN-SUFFIX */
const domainSuffixSet: Set<string> = new Set();
export const buildRejectDomainSet = task(__filename, async () => {
export const buildRejectDomainSet = task(import.meta.path, async () => {
/** @type Set<string> */
const domainSets: Set<string> = new Set();
@ -97,7 +98,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
let previousSize = domainSets.size;
console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`);
for await (const line of readFileByLine(path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'))) {
for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/domainset/reject_sukka.conf'))) {
const l = processLine(line);
if (l) {
domainSets.add(l);
@ -107,7 +108,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
previousSize = domainSets.size - previousSize;
console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
for await (const line of readFileByLine(path.resolve(__dirname, '../Source/non_ip/reject.conf'))) {
for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/reject.conf'))) {
if (line.startsWith('DOMAIN-KEYWORD')) {
const [, ...keywords] = line.split(',');
domainKeywordsSet.add(keywords.join(',').trim());
@ -117,7 +118,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
}
}
for await (const line of readFileByLine(path.resolve(__dirname, '../List/domainset/reject_phishing.conf'))) {
for await (const line of readFileByLine(path.resolve(import.meta.dir, '../List/domainset/reject_phishing.conf'))) {
const l = processLine(line);
if (l && l[0] === '.') {
domainSuffixSet.add(l.slice(1));
@ -196,9 +197,7 @@ export const buildRejectDomainSet = task(__filename, async () => {
const domainset = traceSync('* Sort reject domainset', () => dudupedDominArray.sort(domainSorter));
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining',
'',
@ -214,17 +213,17 @@ export const buildRejectDomainSet = task(__filename, async () => {
new Date(),
domainset,
'domainset',
path.resolve(__dirname, '../List/domainset/reject.conf'),
path.resolve(__dirname, '../Clash/domainset/reject.txt')
path.resolve(import.meta.dir, '../List/domainset/reject.conf'),
path.resolve(import.meta.dir, '../Clash/domainset/reject.txt')
),
compareAndWriteFile(
rejectDomainsStats.map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`),
path.resolve(__dirname, '../List/internal/reject-stats.txt')
path.resolve(import.meta.dir, '../List/internal/reject-stats.txt')
),
// Copy reject_sukka.conf for backward compatibility
fsp.cp(
path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'),
path.resolve(__dirname, '../List/domainset/reject_sukka.conf'),
path.resolve(import.meta.dir, '../Source/domainset/reject_sukka.conf'),
path.resolve(import.meta.dir, '../List/domainset/reject_sukka.conf'),
{ force: true, recursive: true }
)
]);

View File

@ -7,6 +7,7 @@ import { Sema } from 'async-sema';
import * as tldts from 'tldts';
import { task } from './lib/trace-runner';
import { fetchWithRetry } from './lib/fetch-retry';
import { SHARED_DESCRIPTION } from './lib/constants';
const s = new Sema(3);
@ -57,7 +58,7 @@ const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> =>
}
};
export const buildSpeedtestDomainSet = task(__filename, async () => {
export const buildSpeedtestDomainSet = task(import.meta.path, async () => {
/** @type {Set<string>} */
const domains: Set<string> = new Set([
'.speedtest.net',
@ -135,9 +136,9 @@ export const buildSpeedtestDomainSet = task(__filename, async () => {
const deduped = domainDeduper(Array.from(domains)).sort(domainSorter);
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge'
...SHARED_DESCRIPTION,
'',
'This file contains common speedtest endpoints.'
];
return Promise.all(createRuleset(
@ -146,8 +147,8 @@ export const buildSpeedtestDomainSet = task(__filename, async () => {
new Date(),
deduped,
'domainset',
path.resolve(__dirname, '../List/domainset/speedtest.conf'),
path.resolve(__dirname, '../Clash/domainset/speedtest.txt')
path.resolve(import.meta.dir, '../List/domainset/speedtest.conf'),
path.resolve(import.meta.dir, '../Clash/domainset/speedtest.txt')
));
});

View File

@ -5,6 +5,7 @@ import path from 'path';
import { createRuleset } from './lib/create-file';
import { ALL, NORTH_AMERICA, EU, HK, TW, JP, KR } from '../Source/stream';
import { SHARED_DESCRIPTION } from './lib/constants';
const createRulesetForStreamService = (fileId: string, title: string, streamServices: import('../Source/stream').StreamService[]) => {
return [
@ -12,25 +13,21 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
...createRuleset(
`Sukka's Ruleset - Stream Services: ${title}`,
[
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
...streamServices.map((i: { name: any; }) => `- ${i.name}`)
],
new Date(),
streamServices.flatMap((i: { rules: any; }) => i.rules),
'ruleset',
path.resolve(__dirname, `../List/non_ip/${fileId}.conf`),
path.resolve(__dirname, `../Clash/non_ip/${fileId}.txt`)
path.resolve(import.meta.dir, `../List/non_ip/${fileId}.conf`),
path.resolve(import.meta.dir, `../Clash/non_ip/${fileId}.txt`)
),
// IP
...createRuleset(
`Sukka's Ruleset - Stream Services' IPs: ${title}`,
[
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'',
...streamServices.map((i: { name: any; }) => `- ${i.name}`)
],
@ -44,13 +41,13 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
: []
)),
'ruleset',
path.resolve(__dirname, `../List/ip/${fileId}.conf`),
path.resolve(__dirname, `../Clash/ip/${fileId}.txt`)
path.resolve(import.meta.dir, `../List/ip/${fileId}.conf`),
path.resolve(import.meta.dir, `../Clash/ip/${fileId}.txt`)
)
];
};
export const buildStreamService = task(__filename, async () => {
export const buildStreamService = task(import.meta.path, async () => {
return Promise.all([
...createRulesetForStreamService('stream', 'All', ALL),
...createRulesetForStreamService('stream_us', 'North America', NORTH_AMERICA),

View File

@ -6,14 +6,14 @@ import { isIPv4, isIPv6 } from 'net';
import { processLine } from './lib/process-line';
import { createRuleset } from './lib/create-file';
import { task } from './lib/trace-runner';
import { SHARED_DESCRIPTION } from './lib/constants';
export const buildTelegramCIDR = task(__filename, async () => {
export const buildTelegramCIDR = task(import.meta.path, async () => {
const resp = await fetchWithRetry('https://core.telegram.org/resources/cidr.txt', defaultRequestInit);
const lastModified = resp.headers.get('last-modified');
const date = lastModified ? new Date(lastModified) : new Date();
/** @type {string[]} */
const results = [];
const results: string[] = [];
for await (const line of createReadlineInterfaceFromResponse(resp)) {
const cidr = processLine(line);
@ -33,9 +33,7 @@ export const buildTelegramCIDR = task(__filename, async () => {
}
const description = [
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
...SHARED_DESCRIPTION,
'Data from:',
' - https://core.telegram.org/resources/cidr.txt'
];
@ -46,8 +44,8 @@ export const buildTelegramCIDR = task(__filename, async () => {
date,
results,
'ruleset',
path.resolve(__dirname, '../List/ip/telegram.conf'),
path.resolve(__dirname, '../Clash/ip/telegram.txt')
path.resolve(import.meta.dir, '../List/ip/telegram.conf'),
path.resolve(import.meta.dir, '../Clash/ip/telegram.txt')
));
});

View File

@ -13,12 +13,12 @@ import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
const IS_READING_BUILD_OUTPUT = 1 << 2;
const ALL_FILES_EXISTS = 1 << 3;
export const downloadPreviousBuild = task(__filename, async () => {
export const downloadPreviousBuild = task(import.meta.path, async () => {
const buildOutputList: string[] = [];
let flag = 1 | ALL_FILES_EXISTS;
for await (const line of readFileByLine(path.resolve(__dirname, '../.gitignore'))) {
for await (const line of readFileByLine(path.resolve(import.meta.dir, '../.gitignore'))) {
if (line === '# $ build output') {
flag = flag | IS_READING_BUILD_OUTPUT;
continue;
@ -31,7 +31,7 @@ export const downloadPreviousBuild = task(__filename, async () => {
if (!isCI) {
// Bun.file().exists() doesn't check directory
if (!fs.existsSync(path.join(__dirname, '..', line))) {
if (!fs.existsSync(path.join(import.meta.dir, '..', line))) {
flag = flag & ~ALL_FILES_EXISTS;
}
}
@ -74,7 +74,7 @@ export const downloadPreviousBuild = task(__filename, async () => {
const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master' + path.sep, '');
const targetPath = path.join(__dirname, '..', relativeEntryPath);
const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);
await fsp.mkdir(path.dirname(targetPath), { recursive: true });
const targetFile = Bun.file(targetPath);
@ -97,8 +97,8 @@ export const downloadPreviousBuild = task(__filename, async () => {
);
});
export const downloadPublicSuffixList = task(__filename, async () => {
const publicSuffixDir = path.resolve(__dirname, '../node_modules/.cache');
export const downloadPublicSuffixList = task(import.meta.path, async () => {
const publicSuffixDir = path.resolve(import.meta.dir, '../node_modules/.cache');
const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt');
const [resp] = await Promise.all([

5
Build/lib/constants.ts Normal file
View File

@ -0,0 +1,5 @@
/**
 * Header description lines shared by every generated ruleset/domainset file:
 * the license, the project homepage, and the GitHub repository link.
 *
 * Declared `as const` so each entry keeps its literal string type and the
 * tuple stays readonly; consumers spread it into their own description arrays.
 */
export const SHARED_DESCRIPTION = [
  'License: AGPL 3.0',
  'Homepage: https://ruleset.skk.moe',
  'GitHub: https://github.com/SukkaW/Surge'
] as const;

View File

@ -10,7 +10,7 @@ export function readFileByLine(file: string | BunFile) {
return file.stream().pipeThrough(new PolyfillTextDecoderStream()).pipeThrough(new TextLineStream());
}
export async function createReadlineInterfaceFromResponse(resp: Response) {
export function createReadlineInterfaceFromResponse(resp: Response) {
if (!resp.body) {
throw new Error('Failed to fetch remote text');
}

View File

@ -4,7 +4,7 @@ import { traceAsync } from './trace-runner';
import { defaultRequestInit, fetchWithRetry } from './fetch-retry';
import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
const publicSuffixPath = path.resolve(__dirname, '../../node_modules/.cache/public_suffix_list_dat.txt');
const publicSuffixPath = path.resolve(import.meta.dir, '../../node_modules/.cache/public_suffix_list_dat.txt');
const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => {
const customFetch = async (url: string | URL) => Bun.file(url);

View File

@ -23,7 +23,7 @@ export const processLine = (line: string): string | null => {
return trimmed;
};
export const processLineFromReadline = async (rl: AsyncGenerator<string>): Promise<string[]> => {
export const processLineFromReadline = async (rl: AsyncGenerator<string> | ReadableStream<string>): Promise<string[]> => {
const res: string[] = [];
for await (const line of rl) {
const l: string | null = processLine(line);

View File

@ -13,7 +13,6 @@
// limitations under the License.
// Polyfill for TextEncoderStream and TextDecoderStream
// Modified by Sukka (https://skk.moe) to increase compatibility and performance with Bun.
export class PolyfillTextDecoderStream extends TransformStream<Uint8Array, string> {
@ -23,10 +22,7 @@ export class PolyfillTextDecoderStream extends TransformStream<Uint8Array, strin
constructor(
encoding: Encoding = 'utf-8',
{
fatal = false,
ignoreBOM = false,
}: ConstructorParameters<typeof TextDecoder>[1] = {},
{ fatal = false, ignoreBOM = false }: ConstructorParameters<typeof TextDecoder>[1] = {},
) {
const decoder = new TextDecoder(encoding, { fatal, ignoreBOM });
super({

View File

@ -19,57 +19,55 @@ interface TextLineStreamOptions {
* ```
*/
export class TextLineStream extends TransformStream<string, string> {
private __allowCR: boolean;
private __buf = '';
constructor(options?: TextLineStreamOptions) {
const allowCR = options?.allowCR ?? false;
super({
transform: (chunk, controller) => this.handle(chunk, controller),
transform: (chunk, controller) => {
chunk = this.__buf + chunk;
for (; ;) {
const lfIndex = chunk.indexOf('\n');
if (allowCR) {
const crIndex = chunk.indexOf('\r');
if (
crIndex !== -1 && crIndex !== (chunk.length - 1) &&
(lfIndex === -1 || (lfIndex - 1) > crIndex)
) {
controller.enqueue(chunk.slice(0, crIndex));
chunk = chunk.slice(crIndex + 1);
continue;
}
}
if (lfIndex !== -1) {
let crOrLfIndex = lfIndex;
if (chunk[lfIndex - 1] === '\r') {
crOrLfIndex--;
}
controller.enqueue(chunk.slice(0, crOrLfIndex));
chunk = chunk.slice(lfIndex + 1);
continue;
}
break;
}
this.__buf = chunk;
},
flush: (controller) => {
if (this.__buf.length > 0) {
if (
this.__allowCR &&
this.__buf[this.__buf.length - 1] === '\r'
) controller.enqueue(this.__buf.slice(0, -1));
else controller.enqueue(this.__buf);
if (allowCR && this.__buf[this.__buf.length - 1] === '\r') {
controller.enqueue(this.__buf.slice(0, -1));
} else {
controller.enqueue(this.__buf);
};
}
},
});
this.__allowCR = options?.allowCR ?? false;
}
private handle(chunk: string, controller: TransformStreamDefaultController<string>) {
chunk = this.__buf + chunk;
for (;;) {
const lfIndex = chunk.indexOf('\n');
if (this.__allowCR) {
const crIndex = chunk.indexOf('\r');
if (
crIndex !== -1 && crIndex !== (chunk.length - 1) &&
(lfIndex === -1 || (lfIndex - 1) > crIndex)
) {
controller.enqueue(chunk.slice(0, crIndex));
chunk = chunk.slice(crIndex + 1);
continue;
}
}
if (lfIndex !== -1) {
let crOrLfIndex = lfIndex;
if (chunk[lfIndex - 1] === '\r') {
crOrLfIndex--;
}
controller.enqueue(chunk.slice(0, crOrLfIndex));
chunk = chunk.slice(lfIndex + 1);
continue;
}
break;
}
this.__buf = chunk;
}
}

View File

@ -24,8 +24,8 @@ export interface TaskResult {
readonly taskName: string;
}
const task = <T>(__filename: string, fn: () => Promise<T>, customname: string | null = null) => {
const taskName = customname ?? path.basename(__filename, path.extname(__filename));
const task = <T>(importMetaPath: string, fn: () => Promise<T>, customname: string | null = null) => {
const taskName = customname ?? path.basename(importMetaPath, path.extname(importMetaPath));
return async () => {
console.log(`🏃 [${taskName}] Start executing`);
const start = performance.now();

View File

@ -15,7 +15,7 @@ const SPECIAL_SUFFIXES = new Set([
]);
const validateDomainSet = async (filePath: string) => {
for await (const l of readFileByLine(path.resolve(__dirname, '../List/domainset', filePath))) {
for await (const l of readFileByLine(path.resolve(import.meta.dir, '../List/domainset', filePath))) {
// starts with #
const line = processLine(l);
if (!line) {
@ -38,7 +38,7 @@ const validateDomainSet = async (filePath: string) => {
const _validateRuleset = async (filePath: string) => {
console.log(`[${filePath}]`);
for await (const l of readFileByLine(path.resolve(__dirname, '../List/non_ip', filePath))) {
for await (const l of readFileByLine(path.resolve(import.meta.dir, '../List/non_ip', filePath))) {
// starts with #
const line = processLine(l);
if (!line) {
@ -58,13 +58,13 @@ const _validateRuleset = async (filePath: string) => {
}
};
export const validate = task(__filename, async () => {
export const validate = task(import.meta.path, async () => {
// const [domainsetFiles, _rulesetFiles] = await Promise.all([
// listDir(path.resolve(__dirname, '../List/domainset')),
// listDir(path.resolve(__dirname, '../List/non_ip'))
// listDir(path.resolve(import.meta.dir, '../List/domainset')),
// listDir(path.resolve(import.meta.dir, '../List/non_ip'))
// ]);
return Promise.all([
listDir(path.resolve(__dirname, '../List/domainset'))
listDir(path.resolve(import.meta.dir, '../List/domainset'))
.then(domainsetFiles => Promise.all(domainsetFiles.map(file => validateDomainSet(file))))
// rulesetFiles.map(file => validateRuleset(file))
]);

View File

@ -145,6 +145,7 @@ mirror.navercorp.com
fedorapeople.org
.cloudflaremirrors.com
.repo.jing.rocks
mirrors.edge.kernel.org
# WhatPulse
releases.whatpulse.org
# GIMP