diff --git a/Build/build-anti-bogus-domain.ts b/Build/build-anti-bogus-domain.ts index f62df805..e6843bde 100644 --- a/Build/build-anti-bogus-domain.ts +++ b/Build/build-anti-bogus-domain.ts @@ -5,6 +5,7 @@ import { createRuleset } from './lib/create-file'; import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib/fetch-remote-text-by-line'; import { processLine } from './lib/process-line'; import { task } from './lib/trace-runner'; +import { SHARED_DESCRIPTION } from './lib/constants'; const getBogusNxDomainIPs = async () => { /** @type {string[]} */ @@ -22,12 +23,12 @@ const getBogusNxDomainIPs = async () => { return result; }; -export const buildAntiBogusDomain = task(__filename, async () => { +export const buildAntiBogusDomain = task(import.meta.path, async () => { const bogusIpPromise = getBogusNxDomainIPs(); /** @type {string[]} */ const result = []; - for await (const line of readFileByLine(path.resolve(__dirname, '../Source/ip/reject.conf'))) { + for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/ip/reject.conf'))) { if (line === '# --- [Anti Bogus Domain Replace Me] ---') { (await bogusIpPromise).forEach(rule => result.push(rule)); continue; @@ -40,9 +41,7 @@ export const buildAntiBogusDomain = task(__filename, async () => { } const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', 'This file contains known addresses that are hijacking NXDOMAIN results returned by DNS servers.', '', @@ -56,8 +55,8 @@ export const buildAntiBogusDomain = task(__filename, async () => { new Date(), result, 'ruleset', - path.resolve(__dirname, '../List/ip/reject.conf'), - path.resolve(__dirname, '../Clash/ip/reject.txt') + path.resolve(import.meta.dir, '../List/ip/reject.conf'), + path.resolve(import.meta.dir, '../Clash/ip/reject.txt') )); }); diff --git a/Build/build-apple-cdn.ts b/Build/build-apple-cdn.ts index 
7bddca33..5ae8eb0b 100644 --- a/Build/build-apple-cdn.ts +++ b/Build/build-apple-cdn.ts @@ -3,14 +3,13 @@ import path from 'path'; import { createRuleset } from './lib/create-file'; import { parseFelixDnsmasq } from './lib/parse-dnsmasq'; import { task } from './lib/trace-runner'; +import { SHARED_DESCRIPTION } from './lib/constants'; -export const buildAppleCdn = task(__filename, async () => { +export const buildAppleCdn = task(import.meta.path, async () => { const res = await parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf'); const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', 'This file contains Apple\'s domains using their China mainland CDN servers.', '', @@ -28,8 +27,8 @@ export const buildAppleCdn = task(__filename, async () => { new Date(), ruleset, 'ruleset', - path.resolve(__dirname, '../List/non_ip/apple_cdn.conf'), - path.resolve(__dirname, '../Clash/non_ip/apple_cdn.txt') + path.resolve(import.meta.dir, '../List/non_ip/apple_cdn.conf'), + path.resolve(import.meta.dir, '../Clash/non_ip/apple_cdn.txt') ), ...createRuleset( 'Sukka\'s Ruleset - Apple CDN', @@ -37,8 +36,8 @@ export const buildAppleCdn = task(__filename, async () => { new Date(), domainset, 'domainset', - path.resolve(__dirname, '../List/domainset/apple_cdn.conf'), - path.resolve(__dirname, '../Clash/domainset/apple_cdn.txt') + path.resolve(import.meta.dir, '../List/domainset/apple_cdn.conf'), + path.resolve(import.meta.dir, '../Clash/domainset/apple_cdn.txt') ) ]); }); diff --git a/Build/build-cdn-conf.ts b/Build/build-cdn-conf.ts index b0c62c7b..d042cd26 100644 --- a/Build/build-cdn-conf.ts +++ b/Build/build-cdn-conf.ts @@ -4,8 +4,9 @@ import { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } from './lib import { createTrie } from './lib/trie'; import { task } from './lib/trace-runner'; import { processLine } from 
'./lib/process-line'; +import { SHARED_DESCRIPTION } from './lib/constants'; -const publicSuffixPath: string = path.resolve(__dirname, '../node_modules/.cache/public_suffix_list_dat.txt'); +const publicSuffixPath: string = path.resolve(import.meta.dir, '../node_modules/.cache/public_suffix_list_dat.txt'); const getS3OSSDomains = async (): Promise> => { const trie = createTrie(); @@ -55,13 +56,13 @@ const getS3OSSDomains = async (): Promise> => { return S3OSSDomains; }; -const buildCdnConf = task(__filename, async () => { +const buildCdnConf = task(import.meta.path, async () => { /** @type {string[]} */ const cdnDomainsList: string[] = []; const getS3OSSDomainsPromise: Promise> = getS3OSSDomains(); - for await (const l of readFileByLine(path.resolve(__dirname, '../Source/non_ip/cdn.conf'))) { + for await (const l of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf'))) { if (l === '# --- [AWS S3 Replace Me] ---') { (await getS3OSSDomainsPromise).forEach((domain: string) => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); }); continue; @@ -73,9 +74,7 @@ const buildCdnConf = task(__filename, async () => { } const description: string[] = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', 'This file contains object storage and static assets CDN domains.' 
]; @@ -86,8 +85,8 @@ const buildCdnConf = task(__filename, async () => { new Date(), cdnDomainsList, 'ruleset', - path.resolve(__dirname, '../List/non_ip/cdn.conf'), - path.resolve(__dirname, '../Clash/non_ip/cdn.txt') + path.resolve(import.meta.dir, '../List/non_ip/cdn.conf'), + path.resolve(import.meta.dir, '../Clash/non_ip/cdn.txt') )); }); diff --git a/Build/build-chn-cidr.ts b/Build/build-chn-cidr.ts index 6295f995..a618f19b 100644 --- a/Build/build-chn-cidr.ts +++ b/Build/build-chn-cidr.ts @@ -10,7 +10,7 @@ const EXCLUDE_CIDRS = [ '223.120.0.0/15' ]; -export const buildChnCidr = task(__filename, async () => { +export const buildChnCidr = task(import.meta.path, async () => { const [{ exclude }, cidr] = await Promise.all([ import('cidr-tools-wasm'), processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')) @@ -18,6 +18,7 @@ export const buildChnCidr = task(__filename, async () => { const filteredCidr = exclude(cidr, EXCLUDE_CIDRS, true); + // Can not use SHARED_DESCRIPTION here as different license const description = [ 'License: CC BY-SA 2.0', 'Homepage: https://ruleset.skk.moe', @@ -34,7 +35,7 @@ export const buildChnCidr = task(__filename, async () => { new Date(), filteredCidr.map(i => `IP-CIDR,${i}`) ), - pathResolve(__dirname, '../List/ip/china_ip.conf') + pathResolve(import.meta.dir, '../List/ip/china_ip.conf') ), compareAndWriteFile( withBannerArray( @@ -43,7 +44,7 @@ export const buildChnCidr = task(__filename, async () => { new Date(), filteredCidr ), - pathResolve(__dirname, '../Clash/ip/china_ip.txt') + pathResolve(import.meta.dir, '../Clash/ip/china_ip.txt') ) ]); }); diff --git a/Build/build-common.ts b/Build/build-common.ts index 57bf9f7a..2188bead 100644 --- a/Build/build-common.ts +++ b/Build/build-common.ts @@ -7,16 +7,17 @@ import { processLine } from './lib/process-line'; import { createRuleset } from './lib/create-file'; import { domainDeduper } 
from './lib/domain-deduper'; import { task } from './lib/trace-runner'; +import { SHARED_DESCRIPTION } from './lib/constants'; const MAGIC_COMMAND_SKIP = '# $ custom_build_script'; const MAGIC_COMMAND_TITLE = '# $ meta_title '; const MAGIC_COMMAND_DESCRIPTION = '# $ meta_description '; -const sourceDir = path.resolve(__dirname, '../Source'); -const outputSurgeDir = path.resolve(__dirname, '../List'); -const outputClashDir = path.resolve(__dirname, '../Clash'); +const sourceDir = path.resolve(import.meta.dir, '../Source'); +const outputSurgeDir = path.resolve(import.meta.dir, '../List'); +const outputClashDir = path.resolve(import.meta.dir, '../Clash'); -export const buildCommon = task(__filename, async () => { +export const buildCommon = task(import.meta.path, async () => { const promises: Promise[] = []; const pw = new PathScurry(sourceDir); @@ -49,30 +50,38 @@ if (import.meta.main) { } const processFile = async (sourcePath: string) => { + console.log('Processing', sourcePath); + const lines: string[] = []; let title = ''; const descriptions: string[] = []; - for await (const line of readFileByLine(sourcePath)) { - if (line === MAGIC_COMMAND_SKIP) { - return; - } - if (line.startsWith(MAGIC_COMMAND_TITLE)) { - title = line.slice(MAGIC_COMMAND_TITLE.length).trim(); - continue; - } - - if (line.startsWith(MAGIC_COMMAND_DESCRIPTION)) { - descriptions.push(line.slice(MAGIC_COMMAND_DESCRIPTION.length).trim()); - continue; - } - - const l = processLine(line); - if (l) { - lines.push(l); + try { + for await (const line of readFileByLine(sourcePath)) { + if (line === MAGIC_COMMAND_SKIP) { + return; + } + + if (line.startsWith(MAGIC_COMMAND_TITLE)) { + title = line.slice(MAGIC_COMMAND_TITLE.length).trim(); + continue; + } + + if (line.startsWith(MAGIC_COMMAND_DESCRIPTION)) { + descriptions.push(line.slice(MAGIC_COMMAND_DESCRIPTION.length).trim()); + continue; + } + + const l = processLine(line); + if (l) { + lines.push(l); + } } + } catch (e) { + console.error('Error 
processing', sourcePath); + console.trace(e); } return [title, descriptions, lines] as const; @@ -85,9 +94,7 @@ async function transformDomainset(sourcePath: string, relativePath: string) { const deduped = domainDeduper(lines); const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, ...( descriptions.length ? ['', ...descriptions] @@ -115,9 +122,7 @@ async function transformRuleset(sourcePath: string, relativePath: string) { const [title, descriptions, lines] = res; const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, ...( descriptions.length ? ['', ...descriptions] diff --git a/Build/build-domestic-ruleset.ts b/Build/build-domestic-ruleset.ts index 2c51021f..422e74d0 100644 --- a/Build/build-domestic-ruleset.ts +++ b/Build/build-domestic-ruleset.ts @@ -5,9 +5,10 @@ import { readFileByLine } from './lib/fetch-remote-text-by-line'; import { processLineFromReadline } from './lib/process-line'; import { compareAndWriteFile, createRuleset } from './lib/create-file'; import { task } from './lib/trace-runner'; +import { SHARED_DESCRIPTION } from './lib/constants'; -export const buildDomesticRuleset = task(__filename, async () => { - const results = await processLineFromReadline(readFileByLine(path.resolve(__dirname, '../Source/non_ip/domestic.conf'))); +export const buildDomesticRuleset = task(import.meta.path, async () => { + const results = await processLineFromReadline(readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/domestic.conf'))); results.push( ...Object.entries(DOMESTICS) @@ -22,9 +23,7 @@ export const buildDomesticRuleset = task(__filename, async () => { ); const rulesetDescription = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', 'This file contains known 
addresses that are avaliable in the Mainland China.' ]; @@ -36,8 +35,8 @@ export const buildDomesticRuleset = task(__filename, async () => { new Date(), results, 'ruleset', - path.resolve(__dirname, '../List/non_ip/domestic.conf'), - path.resolve(__dirname, '../Clash/non_ip/domestic.txt') + path.resolve(import.meta.dir, '../List/non_ip/domestic.conf'), + path.resolve(import.meta.dir, '../Clash/non_ip/domestic.txt') ), compareAndWriteFile( [ @@ -53,7 +52,7 @@ export const buildDomesticRuleset = task(__filename, async () => { ]) ) ], - path.resolve(__dirname, '../Modules/sukka_local_dns_mapping.sgmodule') + path.resolve(import.meta.dir, '../Modules/sukka_local_dns_mapping.sgmodule') ) ]); }); diff --git a/Build/build-internal-cdn-rules.ts b/Build/build-internal-cdn-rules.ts index 7438eab9..7db68873 100644 --- a/Build/build-internal-cdn-rules.ts +++ b/Build/build-internal-cdn-rules.ts @@ -12,7 +12,7 @@ import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix'; const escapeRegExp = (string = '') => string.replaceAll(/[$()*+.?[\\\]^{|}]/g, '\\$&'); -export const buildInternalCDNDomains = task(__filename, async () => { +export const buildInternalCDNDomains = task(import.meta.path, async () => { const set = new Set(); const keywords = new Set(); @@ -63,16 +63,16 @@ export const buildInternalCDNDomains = task(__filename, async () => { }; await Promise.all([ - processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/cdn.conf')), - processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global.conf')), - processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global_plus.conf')), - processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/my_proxy.conf')), - processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/stream.conf')), - processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/telegram.conf')), - processLocalDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf')), - processLocalDomainSet(path.resolve(__dirname, 
'../List/domainset/download.conf')), + processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/cdn.conf')), + processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/global.conf')), + processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/global_plus.conf')), + processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/my_proxy.conf')), + processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/stream.conf')), + processLocalRuleSet(path.resolve(import.meta.dir, '../List/non_ip/telegram.conf')), + processLocalDomainSet(path.resolve(import.meta.dir, '../List/domainset/cdn.conf')), + processLocalDomainSet(path.resolve(import.meta.dir, '../List/domainset/download.conf')), - fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true }) + fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true }) ]); return compareAndWriteFile( @@ -80,7 +80,7 @@ export const buildInternalCDNDomains = task(__filename, async () => { ...Array.from(set).sort(domainSorter).map(i => `SUFFIX,${i}`), ...Array.from(keywords).sort().map(i => `REGEX,${i}`) ], - path.resolve(__dirname, '../List/internal/cdn.txt') + path.resolve(import.meta.dir, '../List/internal/cdn.txt') ); }); diff --git a/Build/build-internal-chn-domains.ts b/Build/build-internal-chn-domains.ts index da8c55a9..3328b523 100644 --- a/Build/build-internal-chn-domains.ts +++ b/Build/build-internal-chn-domains.ts @@ -4,15 +4,15 @@ import { parseFelixDnsmasq } from './lib/parse-dnsmasq'; import { task } from './lib/trace-runner'; import { compareAndWriteFile } from './lib/create-file'; -export const buildInternalChnDomains = task(__filename, async () => { +export const buildInternalChnDomains = task(import.meta.path, async () => { const [result] = await Promise.all([ parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'), - fsp.mkdir(path.resolve(__dirname, '../List/internal'), { 
recursive: true }) + fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true }) ]); return compareAndWriteFile( result.map(line => `SUFFIX,${line}`), - path.resolve(__dirname, '../List/internal/accelerated-china-domains.txt') + path.resolve(import.meta.dir, '../List/internal/accelerated-china-domains.txt') ); }); diff --git a/Build/build-internal-reverse-chn-cidr.ts b/Build/build-internal-reverse-chn-cidr.ts index d094dc83..5f923dd0 100644 --- a/Build/build-internal-reverse-chn-cidr.ts +++ b/Build/build-internal-reverse-chn-cidr.ts @@ -22,11 +22,11 @@ const RESERVED_IPV4_CIDR = [ '240.0.0.0/4' ]; -export const buildInternalReverseChnCIDR = task(__filename, async () => { +export const buildInternalReverseChnCIDR = task(import.meta.path, async () => { const [{ exclude }, cidr] = await Promise.all([ import('cidr-tools-wasm'), processLineFromReadline(await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')), - fsp.mkdir(path.resolve(__dirname, '../List/internal'), { recursive: true }) + fsp.mkdir(path.resolve(import.meta.dir, '../List/internal'), { recursive: true }) ]); const reversedCidr = exclude( @@ -40,7 +40,7 @@ export const buildInternalReverseChnCIDR = task(__filename, async () => { true ); - return Bun.write(path.resolve(__dirname, '../List/internal/reversed-chn-cidr.txt'), `${reversedCidr.join('\n')}\n`); + return Bun.write(path.resolve(import.meta.dir, '../List/internal/reversed-chn-cidr.txt'), `${reversedCidr.join('\n')}\n`); }); if (import.meta.main) { diff --git a/Build/build-phishing-domainset.ts b/Build/build-phishing-domainset.ts index 862b9023..3380ef32 100644 --- a/Build/build-phishing-domainset.ts +++ b/Build/build-phishing-domainset.ts @@ -8,6 +8,7 @@ import createTrie from './lib/trie'; import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix'; import { createCachedGorhillGetDomain } from './lib/cached-tld-parse'; import * as tldts from 
'tldts'; +import { SHARED_DESCRIPTION } from './lib/constants'; const WHITELIST_DOMAIN = new Set([ 'w3s.link', @@ -63,7 +64,7 @@ const BLACK_TLD = new Set([ 'com.cn' ]); -export const buildPhishingDomainSet = task(__filename, async () => { +export const buildPhishingDomainSet = task(import.meta.path, async () => { const [{ black: domainSet }, gorhill] = await Promise.all([ processFilterRules( 'https://curbengh.github.io/phishing-filter/phishing-filter-agh.txt', @@ -162,9 +163,7 @@ export const buildPhishingDomainSet = task(__filename, async () => { .sort(domainSorter)); const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', 'The domainset supports enhanced phishing protection', 'Build from:', @@ -177,8 +176,8 @@ export const buildPhishingDomainSet = task(__filename, async () => { new Date(), results, 'domainset', - path.resolve(__dirname, '../List/domainset/reject_phishing.conf'), - path.resolve(__dirname, '../Clash/domainset/reject_phishing.txt') + path.resolve(import.meta.dir, '../List/domainset/reject_phishing.conf'), + path.resolve(import.meta.dir, '../Clash/domainset/reject_phishing.txt') )); }); diff --git a/Build/build-public.ts b/Build/build-public.ts index 07751707..91f27a7b 100644 --- a/Build/build-public.ts +++ b/Build/build-public.ts @@ -3,8 +3,8 @@ import path from 'path'; import fsp from 'fs/promises' import { task } from './lib/trace-runner'; -const rootPath = path.resolve(__dirname, '../'); -const publicPath = path.resolve(__dirname, '../public'); +const rootPath = path.resolve(import.meta.dir, '../'); +const publicPath = path.resolve(import.meta.dir, '../public'); const folderAndFilesToBeDeployed = [ 'Assets', @@ -15,7 +15,7 @@ const folderAndFilesToBeDeployed = [ 'LICENSE' ]; -export const buildPublicHtml = task(__filename, async () => { +export const buildPublicHtml = task(import.meta.path, async () => { await fsp.mkdir(publicPath, { recursive: 
true }); await Promise.all(folderAndFilesToBeDeployed.map(dir => fsp.cp( path.resolve(rootPath, dir), diff --git a/Build/build-redirect-module.ts b/Build/build-redirect-module.ts index c9f1cd3f..92333986 100644 --- a/Build/build-redirect-module.ts +++ b/Build/build-redirect-module.ts @@ -71,7 +71,7 @@ const REDIRECT = /** @type {const} */ ([ ['googleajax.wp-china-yes.net/', 'https://ajax.googleapis.com/'] ]); -export const buildRedirectModule = task(__filename, async () => { +export const buildRedirectModule = task(import.meta.path, async () => { const domains = Array.from(new Set(REDIRECT.map(([from]) => tldts.getHostname(from, { detectIp: false })))).filter(Boolean); return compareAndWriteFile( @@ -88,7 +88,7 @@ export const buildRedirectModule = task(__filename, async () => { return `${src} ${to}$1 302`; }) ], - path.resolve(__dirname, '../Modules/sukka_url_redirect.sgmodule') + path.resolve(import.meta.dir, '../Modules/sukka_url_redirect.sgmodule') ); }); diff --git a/Build/build-reject-domainset.ts b/Build/build-reject-domainset.ts index c9517996..5477999a 100644 --- a/Build/build-reject-domainset.ts +++ b/Build/build-reject-domainset.ts @@ -15,6 +15,7 @@ import { createDomainSorter } from './lib/stable-sort-domain'; import { traceSync, task } from './lib/trace-runner'; import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix'; import * as tldts from 'tldts'; +import { SHARED_DESCRIPTION } from './lib/constants'; /** Whitelists */ const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST); @@ -23,7 +24,7 @@ const domainKeywordsSet: Set = new Set(); /** @type {Set} Dedupe domains included by DOMAIN-SUFFIX */ const domainSuffixSet: Set = new Set(); -export const buildRejectDomainSet = task(__filename, async () => { +export const buildRejectDomainSet = task(import.meta.path, async () => { /** @type Set */ const domainSets: Set = new Set(); @@ -97,7 +98,7 @@ export const buildRejectDomainSet = task(__filename, async () => { let 
previousSize = domainSets.size; console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`); - for await (const line of readFileByLine(path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'))) { + for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/domainset/reject_sukka.conf'))) { const l = processLine(line); if (l) { domainSets.add(l); @@ -107,7 +108,7 @@ export const buildRejectDomainSet = task(__filename, async () => { previousSize = domainSets.size - previousSize; console.log(`Import ${previousSize} rules from reject_sukka.conf!`); - for await (const line of readFileByLine(path.resolve(__dirname, '../Source/non_ip/reject.conf'))) { + for await (const line of readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/reject.conf'))) { if (line.startsWith('DOMAIN-KEYWORD')) { const [, ...keywords] = line.split(','); domainKeywordsSet.add(keywords.join(',').trim()); @@ -117,7 +118,7 @@ export const buildRejectDomainSet = task(__filename, async () => { } } - for await (const line of readFileByLine(path.resolve(__dirname, '../List/domainset/reject_phishing.conf'))) { + for await (const line of readFileByLine(path.resolve(import.meta.dir, '../List/domainset/reject_phishing.conf'))) { const l = processLine(line); if (l && l[0] === '.') { domainSuffixSet.add(l.slice(1)); @@ -196,9 +197,7 @@ export const buildRejectDomainSet = task(__filename, async () => { const domainset = traceSync('* Sort reject domainset', () => dudupedDominArray.sort(domainSorter)); const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', 'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining', '', @@ -214,17 +213,17 @@ export const buildRejectDomainSet = task(__filename, async () => { new Date(), domainset, 'domainset', - path.resolve(__dirname, '../List/domainset/reject.conf'), - 
path.resolve(__dirname, '../Clash/domainset/reject.txt') + path.resolve(import.meta.dir, '../List/domainset/reject.conf'), + path.resolve(import.meta.dir, '../Clash/domainset/reject.txt') ), compareAndWriteFile( rejectDomainsStats.map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`), - path.resolve(__dirname, '../List/internal/reject-stats.txt') + path.resolve(import.meta.dir, '../List/internal/reject-stats.txt') ), // Copy reject_sukka.conf for backward compatibility fsp.cp( - path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'), - path.resolve(__dirname, '../List/domainset/reject_sukka.conf'), + path.resolve(import.meta.dir, '../Source/domainset/reject_sukka.conf'), + path.resolve(import.meta.dir, '../List/domainset/reject_sukka.conf'), { force: true, recursive: true } ) ]); diff --git a/Build/build-speedtest-domainset.ts b/Build/build-speedtest-domainset.ts index 1b0091ab..fa0c604a 100644 --- a/Build/build-speedtest-domainset.ts +++ b/Build/build-speedtest-domainset.ts @@ -7,6 +7,7 @@ import { Sema } from 'async-sema'; import * as tldts from 'tldts'; import { task } from './lib/trace-runner'; import { fetchWithRetry } from './lib/fetch-retry'; +import { SHARED_DESCRIPTION } from './lib/constants'; const s = new Sema(3); @@ -57,7 +58,7 @@ const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> => } }; -export const buildSpeedtestDomainSet = task(__filename, async () => { +export const buildSpeedtestDomainSet = task(import.meta.path, async () => { /** @type {Set} */ const domains: Set = new Set([ '.speedtest.net', @@ -135,9 +136,9 @@ export const buildSpeedtestDomainSet = task(__filename, async () => { const deduped = domainDeduper(Array.from(domains)).sort(domainSorter); const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge' + ...SHARED_DESCRIPTION, + '', + 'This file contains common speedtest endpoints.' 
]; return Promise.all(createRuleset( @@ -146,8 +147,8 @@ export const buildSpeedtestDomainSet = task(__filename, async () => { new Date(), deduped, 'domainset', - path.resolve(__dirname, '../List/domainset/speedtest.conf'), - path.resolve(__dirname, '../Clash/domainset/speedtest.txt') + path.resolve(import.meta.dir, '../List/domainset/speedtest.conf'), + path.resolve(import.meta.dir, '../Clash/domainset/speedtest.txt') )); }); diff --git a/Build/build-stream-service.ts b/Build/build-stream-service.ts index d9d51d80..2d67d447 100644 --- a/Build/build-stream-service.ts +++ b/Build/build-stream-service.ts @@ -5,6 +5,7 @@ import path from 'path'; import { createRuleset } from './lib/create-file'; import { ALL, NORTH_AMERICA, EU, HK, TW, JP, KR } from '../Source/stream'; +import { SHARED_DESCRIPTION } from './lib/constants'; const createRulesetForStreamService = (fileId: string, title: string, streamServices: import('../Source/stream').StreamService[]) => { return [ @@ -12,25 +13,21 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ ...createRuleset( `Sukka's Ruleset - Stream Services: ${title}`, [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', ...streamServices.map((i: { name: any; }) => `- ${i.name}`) ], new Date(), streamServices.flatMap((i: { rules: any; }) => i.rules), 'ruleset', - path.resolve(__dirname, `../List/non_ip/${fileId}.conf`), - path.resolve(__dirname, `../Clash/non_ip/${fileId}.txt`) + path.resolve(import.meta.dir, `../List/non_ip/${fileId}.conf`), + path.resolve(import.meta.dir, `../Clash/non_ip/${fileId}.txt`) ), // IP ...createRuleset( `Sukka's Ruleset - Stream Services' IPs: ${title}`, [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, '', ...streamServices.map((i: { name: any; }) => `- ${i.name}`) ], @@ -44,13 +41,13 @@ const 
createRulesetForStreamService = (fileId: string, title: string, streamServ : [] )), 'ruleset', - path.resolve(__dirname, `../List/ip/${fileId}.conf`), - path.resolve(__dirname, `../Clash/ip/${fileId}.txt`) + path.resolve(import.meta.dir, `../List/ip/${fileId}.conf`), + path.resolve(import.meta.dir, `../Clash/ip/${fileId}.txt`) ) ]; }; -export const buildStreamService = task(__filename, async () => { +export const buildStreamService = task(import.meta.path, async () => { return Promise.all([ ...createRulesetForStreamService('stream', 'All', ALL), ...createRulesetForStreamService('stream_us', 'North America', NORTH_AMERICA), diff --git a/Build/build-telegram-cidr.ts b/Build/build-telegram-cidr.ts index f6fe11ae..bf6e8fbf 100644 --- a/Build/build-telegram-cidr.ts +++ b/Build/build-telegram-cidr.ts @@ -6,14 +6,14 @@ import { isIPv4, isIPv6 } from 'net'; import { processLine } from './lib/process-line'; import { createRuleset } from './lib/create-file'; import { task } from './lib/trace-runner'; +import { SHARED_DESCRIPTION } from './lib/constants'; -export const buildTelegramCIDR = task(__filename, async () => { +export const buildTelegramCIDR = task(import.meta.path, async () => { const resp = await fetchWithRetry('https://core.telegram.org/resources/cidr.txt', defaultRequestInit); const lastModified = resp.headers.get('last-modified'); const date = lastModified ? 
new Date(lastModified) : new Date(); - /** @type {string[]} */ - const results = []; + const results: string[] = []; for await (const line of createReadlineInterfaceFromResponse(resp)) { const cidr = processLine(line); @@ -33,9 +33,7 @@ export const buildTelegramCIDR = task(__filename, async () => { } const description = [ - 'License: AGPL 3.0', - 'Homepage: https://ruleset.skk.moe', - 'GitHub: https://github.com/SukkaW/Surge', + ...SHARED_DESCRIPTION, 'Data from:', ' - https://core.telegram.org/resources/cidr.txt' ]; @@ -46,8 +44,8 @@ export const buildTelegramCIDR = task(__filename, async () => { date, results, 'ruleset', - path.resolve(__dirname, '../List/ip/telegram.conf'), - path.resolve(__dirname, '../Clash/ip/telegram.txt') + path.resolve(import.meta.dir, '../List/ip/telegram.conf'), + path.resolve(import.meta.dir, '../Clash/ip/telegram.txt') )); }); diff --git a/Build/download-previous-build.ts b/Build/download-previous-build.ts index e856cad9..b8bfccae 100644 --- a/Build/download-previous-build.ts +++ b/Build/download-previous-build.ts @@ -13,12 +13,12 @@ import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry'; const IS_READING_BUILD_OUTPUT = 1 << 2; const ALL_FILES_EXISTS = 1 << 3; -export const downloadPreviousBuild = task(__filename, async () => { +export const downloadPreviousBuild = task(import.meta.path, async () => { const buildOutputList: string[] = []; let flag = 1 | ALL_FILES_EXISTS; - for await (const line of readFileByLine(path.resolve(__dirname, '../.gitignore'))) { + for await (const line of readFileByLine(path.resolve(import.meta.dir, '../.gitignore'))) { if (line === '# $ build output') { flag = flag | IS_READING_BUILD_OUTPUT; continue; @@ -31,7 +31,7 @@ export const downloadPreviousBuild = task(__filename, async () => { if (!isCI) { // Bun.file().exists() doesn't check directory - if (!fs.existsSync(path.join(__dirname, '..', line))) { + if (!fs.existsSync(path.join(import.meta.dir, '..', line))) { flag = flag & 
~ALL_FILES_EXISTS; } } @@ -74,7 +74,7 @@ export const downloadPreviousBuild = task(__filename, async () => { const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master' + path.sep, ''); - const targetPath = path.join(__dirname, '..', relativeEntryPath); + const targetPath = path.join(import.meta.dir, '..', relativeEntryPath); await fsp.mkdir(path.dirname(targetPath), { recursive: true }); const targetFile = Bun.file(targetPath); @@ -97,8 +97,8 @@ export const downloadPreviousBuild = task(__filename, async () => { ); }); -export const downloadPublicSuffixList = task(__filename, async () => { - const publicSuffixDir = path.resolve(__dirname, '../node_modules/.cache'); +export const downloadPublicSuffixList = task(import.meta.path, async () => { + const publicSuffixDir = path.resolve(import.meta.dir, '../node_modules/.cache'); const publicSuffixPath = path.join(publicSuffixDir, 'public_suffix_list_dat.txt'); const [resp] = await Promise.all([ diff --git a/Build/lib/constants.ts b/Build/lib/constants.ts new file mode 100644 index 00000000..a6566f2b --- /dev/null +++ b/Build/lib/constants.ts @@ -0,0 +1,5 @@ +export const SHARED_DESCRIPTION = [ + 'License: AGPL 3.0', + 'Homepage: https://ruleset.skk.moe', + 'GitHub: https://github.com/SukkaW/Surge', +] as const; diff --git a/Build/lib/fetch-remote-text-by-line.ts b/Build/lib/fetch-remote-text-by-line.ts index 4c6988c9..ee546c2f 100644 --- a/Build/lib/fetch-remote-text-by-line.ts +++ b/Build/lib/fetch-remote-text-by-line.ts @@ -10,7 +10,7 @@ export function readFileByLine(file: string | BunFile) { return file.stream().pipeThrough(new PolyfillTextDecoderStream()).pipeThrough(new TextLineStream()); } -export async function createReadlineInterfaceFromResponse(resp: Response) { +export function createReadlineInterfaceFromResponse(resp: Response) { if (!resp.body) { throw new Error('Failed to fetch remote text'); } diff --git a/Build/lib/get-gorhill-publicsuffix.ts b/Build/lib/get-gorhill-publicsuffix.ts index 
d2e64658..b7a47ab8 100644 --- a/Build/lib/get-gorhill-publicsuffix.ts +++ b/Build/lib/get-gorhill-publicsuffix.ts @@ -4,7 +4,7 @@ import { traceAsync } from './trace-runner'; import { defaultRequestInit, fetchWithRetry } from './fetch-retry'; import type { PublicSuffixList } from 'gorhill-publicsuffixlist'; -const publicSuffixPath = path.resolve(__dirname, '../../node_modules/.cache/public_suffix_list_dat.txt'); +const publicSuffixPath = path.resolve(import.meta.dir, '../../node_modules/.cache/public_suffix_list_dat.txt'); const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => { const customFetch = async (url: string | URL) => Bun.file(url); diff --git a/Build/lib/process-line.ts b/Build/lib/process-line.ts index 512711ac..8ea79925 100644 --- a/Build/lib/process-line.ts +++ b/Build/lib/process-line.ts @@ -23,7 +23,7 @@ export const processLine = (line: string): string | null => { return trimmed; }; -export const processLineFromReadline = async (rl: AsyncGenerator): Promise => { +export const processLineFromReadline = async (rl: AsyncGenerator | ReadableStream): Promise => { const res: string[] = []; for await (const line of rl) { const l: string | null = processLine(line); diff --git a/Build/lib/text-decoder-stream.ts b/Build/lib/text-decoder-stream.ts index 5b11da7b..e9cd5de2 100644 --- a/Build/lib/text-decoder-stream.ts +++ b/Build/lib/text-decoder-stream.ts @@ -13,7 +13,6 @@ // limitations under the License. // Polyfill for TextEncoderStream and TextDecoderStream - // Modified by Sukka (https://skk.moe) to increase compatibility and performance with Bun. 
export class PolyfillTextDecoderStream extends TransformStream { @@ -23,10 +22,7 @@ export class PolyfillTextDecoderStream extends TransformStream[1] = {}, + { fatal = false, ignoreBOM = false }: ConstructorParameters[1] = {}, ) { const decoder = new TextDecoder(encoding, { fatal, ignoreBOM }); super({ diff --git a/Build/lib/text-line-transform-stream.ts b/Build/lib/text-line-transform-stream.ts index c4becbb2..bd4f44ea 100644 --- a/Build/lib/text-line-transform-stream.ts +++ b/Build/lib/text-line-transform-stream.ts @@ -19,57 +19,55 @@ interface TextLineStreamOptions { * ``` */ export class TextLineStream extends TransformStream { - private __allowCR: boolean; private __buf = ''; constructor(options?: TextLineStreamOptions) { + const allowCR = options?.allowCR ?? false; + super({ - transform: (chunk, controller) => this.handle(chunk, controller), + transform: (chunk, controller) => { + chunk = this.__buf + chunk; + + for (; ;) { + const lfIndex = chunk.indexOf('\n'); + + if (allowCR) { + const crIndex = chunk.indexOf('\r'); + + if ( + crIndex !== -1 && crIndex !== (chunk.length - 1) && + (lfIndex === -1 || (lfIndex - 1) > crIndex) + ) { + controller.enqueue(chunk.slice(0, crIndex)); + chunk = chunk.slice(crIndex + 1); + continue; + } + } + + if (lfIndex !== -1) { + let crOrLfIndex = lfIndex; + if (chunk[lfIndex - 1] === '\r') { + crOrLfIndex--; + } + controller.enqueue(chunk.slice(0, crOrLfIndex)); + chunk = chunk.slice(lfIndex + 1); + continue; + } + + break; + } + + this.__buf = chunk; + }, flush: (controller) => { if (this.__buf.length > 0) { - if ( - this.__allowCR && - this.__buf[this.__buf.length - 1] === '\r' - ) controller.enqueue(this.__buf.slice(0, -1)); - else controller.enqueue(this.__buf); + if (allowCR && this.__buf[this.__buf.length - 1] === '\r') { + controller.enqueue(this.__buf.slice(0, -1)); + } else { + controller.enqueue(this.__buf); + }; } }, }); - this.__allowCR = options?.allowCR ?? 
false; - } - - private handle(chunk: string, controller: TransformStreamDefaultController<string>) { - chunk = this.__buf + chunk; - - for (;;) { - const lfIndex = chunk.indexOf('\n'); - - if (this.__allowCR) { - const crIndex = chunk.indexOf('\r'); - - if ( - crIndex !== -1 && crIndex !== (chunk.length - 1) && - (lfIndex === -1 || (lfIndex - 1) > crIndex) - ) { - controller.enqueue(chunk.slice(0, crIndex)); - chunk = chunk.slice(crIndex + 1); - continue; - } - } - - if (lfIndex !== -1) { - let crOrLfIndex = lfIndex; - if (chunk[lfIndex - 1] === '\r') { - crOrLfIndex--; - } - controller.enqueue(chunk.slice(0, crOrLfIndex)); - chunk = chunk.slice(lfIndex + 1); - continue; - } - - break; - } - - this.__buf = chunk; } } diff --git a/Build/lib/trace-runner.ts b/Build/lib/trace-runner.ts index 886ac177..bb74c478 100644 --- a/Build/lib/trace-runner.ts +++ b/Build/lib/trace-runner.ts @@ -24,8 +24,8 @@ export interface TaskResult { readonly taskName: string; } -const task = (__filename: string, fn: () => Promise<unknown>, customname: string | null = null) => { - const taskName = customname ?? path.basename(__filename, path.extname(__filename)); +const task = (importMetaPath: string, fn: () => Promise<unknown>, customname: string | null = null) => { + const taskName = customname ?? 
path.basename(importMetaPath, path.extname(importMetaPath)); return async () => { console.log(`🏃 [${taskName}] Start executing`); const start = performance.now(); diff --git a/Build/validate-domainset.ts b/Build/validate-domainset.ts index 5faf1400..b3dbadd4 100644 --- a/Build/validate-domainset.ts +++ b/Build/validate-domainset.ts @@ -15,7 +15,7 @@ const SPECIAL_SUFFIXES = new Set([ ]); const validateDomainSet = async (filePath: string) => { - for await (const l of readFileByLine(path.resolve(__dirname, '../List/domainset', filePath))) { + for await (const l of readFileByLine(path.resolve(import.meta.dir, '../List/domainset', filePath))) { // starts with # const line = processLine(l); if (!line) { @@ -38,7 +38,7 @@ const validateDomainSet = async (filePath: string) => { const _validateRuleset = async (filePath: string) => { console.log(`[${filePath}]`); - for await (const l of readFileByLine(path.resolve(__dirname, '../List/non_ip', filePath))) { + for await (const l of readFileByLine(path.resolve(import.meta.dir, '../List/non_ip', filePath))) { // starts with # const line = processLine(l); if (!line) { @@ -58,13 +58,13 @@ const _validateRuleset = async (filePath: string) => { } }; -export const validate = task(__filename, async () => { +export const validate = task(import.meta.path, async () => { // const [domainsetFiles, _rulesetFiles] = await Promise.all([ - // listDir(path.resolve(__dirname, '../List/domainset')), - // listDir(path.resolve(__dirname, '../List/non_ip')) + // listDir(path.resolve(import.meta.dir, '../List/domainset')), + // listDir(path.resolve(import.meta.dir, '../List/non_ip')) // ]); return Promise.all([ - listDir(path.resolve(__dirname, '../List/domainset')) + listDir(path.resolve(import.meta.dir, '../List/domainset')) .then(domainsetFiles => Promise.all(domainsetFiles.map(file => validateDomainSet(file)))) // rulesetFiles.map(file => validateRuleset(file)) ]); diff --git a/Source/domainset/download.conf b/Source/domainset/download.conf 
index 8be4ddf2..e8c7c739 100644 --- a/Source/domainset/download.conf +++ b/Source/domainset/download.conf @@ -145,6 +145,7 @@ mirror.navercorp.com fedorapeople.org .cloudflaremirrors.com .repo.jing.rocks +mirrors.edge.kernel.org # WhatPulse releases.whatpulse.org # GIMP