Perf: make build faster

This commit is contained in:
SukkaW 2023-08-11 17:24:56 +08:00
parent e58ad2c0ac
commit 82f10868c1
9 changed files with 97 additions and 108 deletions

View File

@@ -10,11 +10,9 @@ const { minifyRules } = require('./lib/minify-rules');
console.time('Total Time - build-anti-bogus-domain'); console.time('Total Time - build-anti-bogus-domain');
console.time('* Download bogus-nxdomain-list'); console.time('* Download bogus-nxdomain-list');
const rl = await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf');
/** @type {string[]} */ /** @type {string[]} */
const res = []; const res = [];
for await (const line of rl) { for await (const line of await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf')) {
if (line.startsWith('bogus-nxdomain=')) { if (line.startsWith('bogus-nxdomain=')) {
res.push(line.replace('bogus-nxdomain=', '')); res.push(line.replace('bogus-nxdomain=', ''));
} }

View File

@@ -40,22 +40,6 @@ const Trie = require('./lib/trie');
} }
} }
await compareAndWriteFile(
withBannerArray(
'Sukka\'s Surge Rules - CDN Domains',
[
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'',
'This file contains object storage and static assets CDN domains.'
],
new Date(),
minifyRules(cdnDomainsList)
),
path.resolve(__dirname, '../List/non_ip/cdn.conf')
);
/** /**
* Dedupe cdn.conf * Dedupe cdn.conf
*/ */
@@ -71,21 +55,38 @@ const Trie = require('./lib/trie');
} }
} }
await compareAndWriteFile( await Promise.all([
withBannerArray( compareAndWriteFile(
'Sukka\'s Surge Rules - CDN Domains', withBannerArray(
[ 'Sukka\'s Surge Rules - CDN Domains',
'License: AGPL 3.0', [
'Homepage: https://ruleset.skk.moe', 'License: AGPL 3.0',
'GitHub: https://github.com/SukkaW/Surge', 'Homepage: https://ruleset.skk.moe',
'', 'GitHub: https://github.com/SukkaW/Surge',
'This file contains object storage and static assets CDN domains.' '',
], 'This file contains object storage and static assets CDN domains.'
new Date(), ],
minifyRules(domainDeduper(Array.from(cdnDomains))) new Date(),
minifyRules(cdnDomainsList)
),
path.resolve(__dirname, '../List/non_ip/cdn.conf')
), ),
path.resolve(__dirname, '../List/domainset/cdn.conf') compareAndWriteFile(
); withBannerArray(
'Sukka\'s Surge Rules - CDN Domains',
[
'License: AGPL 3.0',
'Homepage: https://ruleset.skk.moe',
'GitHub: https://github.com/SukkaW/Surge',
'',
'This file contains object storage and static assets CDN domains.'
],
new Date(),
minifyRules(domainDeduper(Array.from(cdnDomains)))
),
path.resolve(__dirname, '../List/domainset/cdn.conf')
)
]);
console.timeEnd('Total Time - build-cdn-conf'); console.timeEnd('Total Time - build-cdn-conf');
})(); })();

View File

@@ -62,17 +62,19 @@ const escapeRegExp = (string) => {
} }
}; };
await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/cdn.conf')); await Promise.all([
await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global.conf')); processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/cdn.conf')),
await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global_plus.conf')); processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global.conf')),
await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/my_proxy.conf')); processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global_plus.conf')),
await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/stream.conf')); processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/my_proxy.conf')),
await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/telegram.conf')); processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/stream.conf')),
processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/telegram.conf')),
processLocalDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf')),
processLocalDomainSet(path.resolve(__dirname, '../List/domainset/download.conf')),
await processLocalDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf')); fse.ensureDir(path.resolve(__dirname, '../List/internal'))
await processLocalDomainSet(path.resolve(__dirname, '../List/domainset/download.conf')); ]);
await fse.ensureDir(path.resolve(__dirname, '../List/internal'));
await fs.promises.writeFile( await fs.promises.writeFile(
path.resolve(__dirname, '../List/internal/cdn.txt'), path.resolve(__dirname, '../List/internal/cdn.txt'),
[ [

View File

@@ -5,9 +5,11 @@ const fs = require('fs');
const { parseFelixDnsmasq } = require('./lib/parse-dnsmasq'); const { parseFelixDnsmasq } = require('./lib/parse-dnsmasq');
(async () => { (async () => {
const result = await parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'); const [result] = await Promise.all([
parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'),
fse.ensureDir(path.resolve(__dirname, '../List/internal'))
]);
await fse.ensureDir(path.resolve(__dirname, '../List/internal'));
await fs.promises.writeFile( await fs.promises.writeFile(
path.resolve(__dirname, '../List/internal/accelerated-china-domains.txt'), path.resolve(__dirname, '../List/internal/accelerated-china-domains.txt'),
`${result.map(line => `SUFFIX,${line}`).join('\n')}\n` `${result.map(line => `SUFFIX,${line}`).join('\n')}\n`

View File

@@ -106,9 +106,7 @@ const domainSuffixSet = new Set();
let previousSize = domainSets.size; let previousSize = domainSets.size;
console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`); console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`);
const rl1 = readFileByLine(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf')); for await (const line of readFileByLine(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'))) {
for await (const line of rl1) {
const l = processLine(line); const l = processLine(line);
if (l) { if (l) {
domainSets.add(l); domainSets.add(l);
@@ -118,8 +116,7 @@ const domainSuffixSet = new Set();
previousSize = domainSets.size - previousSize; previousSize = domainSets.size - previousSize;
console.log(`Import ${previousSize} rules from reject_sukka.conf!`); console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
const rl2 = readFileByLine(pathResolve(__dirname, '../List/non_ip/reject.conf')); for await (const line of readFileByLine(pathResolve(__dirname, '../List/non_ip/reject.conf'))) {
for await (const line of rl2) {
if (line.startsWith('DOMAIN-KEYWORD')) { if (line.startsWith('DOMAIN-KEYWORD')) {
const [, ...keywords] = line.split(','); const [, ...keywords] = line.split(',');
domainKeywordsSet.add(keywords.join(',').trim()); domainKeywordsSet.add(keywords.join(',').trim());
@@ -129,8 +126,7 @@ const domainSuffixSet = new Set();
} }
} }
const rl3 = readFileByLine(pathResolve(__dirname, '../List/domainset/reject_phishing.conf')); for await (const line of readFileByLine(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'))) {
for await (const line of rl3) {
const l = processLine(line); const l = processLine(line);
if (l && l[0] === '.') { if (l && l[0] === '.') {
domainSuffixSet.add(l.slice(1)); domainSuffixSet.add(l.slice(1));
@@ -216,42 +212,42 @@ const domainSuffixSet = new Set();
.sort(sorter) .sort(sorter)
.map((i) => i.v); .map((i) => i.v);
await compareAndWriteFile( await Promise.all([
withBannerArray( compareAndWriteFile(
'Sukka\'s Surge Rules - Reject Base', withBannerArray(
[ 'Sukka\'s Surge Rules - Reject Base',
'License: AGPL 3.0', [
'Homepage: https://ruleset.skk.moe', 'License: AGPL 3.0',
'GitHub: https://github.com/SukkaW/Surge', 'Homepage: https://ruleset.skk.moe',
'', 'GitHub: https://github.com/SukkaW/Surge',
'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining', '',
'', 'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining',
'Build from:', '',
...HOSTS.map(host => ` - ${host[0]}`), 'Build from:',
...ADGUARD_FILTERS.map(filter => ` - ${Array.isArray(filter) ? filter[0] : filter}`) ...HOSTS.map(host => ` - ${host[0]}`),
], ...ADGUARD_FILTERS.map(filter => ` - ${Array.isArray(filter) ? filter[0] : filter}`)
new Date(), ],
sortedDomainSets new Date(),
sortedDomainSets
),
pathResolve(__dirname, '../List/domainset/reject.conf')
), ),
pathResolve(__dirname, '../List/domainset/reject.conf') fs.promises.writeFile(
); pathResolve(__dirname, '../List/internal/reject-stats.txt'),
Object.entries(rejectDomainsStats)
await fs.promises.writeFile( .sort((a, b) => {
pathResolve(__dirname, '../List/internal/reject-stats.txt'), const t = b[1] - a[1];
Object.entries(rejectDomainsStats) if (t === 0) {
.sort((a, b) => { return a[0].localeCompare(b[0]);
const t = b[1] - a[1]; }
if (t === 0) { return t;
return a[0].localeCompare(b[0]); })
} .map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`)
return t; .join('\n')
}) ),
.map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`) // Copy reject_sukka.conf for backward compatibility
.join('\n') fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf'))
); ]);
// Copy reject_sukka.conf for backward compatibility
await fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf'));
console.timeEnd('* Write reject.conf'); console.timeEnd('* Write reject.conf');

View File

@@ -33,9 +33,12 @@ const fileExists = (path) => {
} }
const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`); const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
await fse.ensureDir(extractedPath);
const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages'); const [resp] = await Promise.all([
fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages'),
fse.ensureDir(extractedPath)
]);
await pipeline( await pipeline(
Readable.fromWeb(resp.body), Readable.fromWeb(resp.body),
tar.x({ tar.x({

View File

@@ -29,9 +29,7 @@ async function processDomainLists(domainListsUrl) {
/** @type Set<string> */ /** @type Set<string> */
const domainSets = new Set(); const domainSets = new Set();
const rl = await fetchRemoteTextAndCreateReadlineInterface(domainListsUrl); for await (const line of await fetchRemoteTextAndCreateReadlineInterface(domainListsUrl)) {
for await (const line of rl) {
if (line.startsWith('!')) { if (line.startsWith('!')) {
continue; continue;
} }
@@ -65,9 +63,8 @@ async function processHosts(hostsUrl, includeAllSubDomain = false) {
/** @type Set<string> */ /** @type Set<string> */
const domainSets = new Set(); const domainSets = new Set();
const rl = await fetchRemoteTextAndCreateReadlineInterface(hostsUrl); for await (const l of await fetchRemoteTextAndCreateReadlineInterface(hostsUrl)) {
for await (const _line of rl) { const line = processLine(l);
const line = processLine(_line);
if (!line) { if (!line) {
continue; continue;
} }

View File

@@ -10,12 +10,10 @@ const { readFileByLine } = require('./fetch-remote-text-by-line');
async function compareAndWriteFile(linesA, filePath) { async function compareAndWriteFile(linesA, filePath) {
await fse.ensureFile(filePath); await fse.ensureFile(filePath);
const rl = readFileByLine(filePath);
let isEqual = true; let isEqual = true;
let index = 0; let index = 0;
for await (const lineB of rl) { for await (const lineB of readFileByLine(filePath)) {
const lineA = linesA[index]; const lineA = linesA[index];
index++; index++;

View File

@@ -14,11 +14,7 @@ const SPECIAL_SUFFIXES = new Set([
]); ]);
const validateDomainSet = async (filePath) => { const validateDomainSet = async (filePath) => {
const rl = readFileByLine( for await (const l of readFileByLine(path.resolve(__dirname, '../List/domainset', filePath))) {
path.resolve(__dirname, '../List/domainset', filePath)
);
for await (const l of rl) {
// starts with # // starts with #
const line = processLine(l); const line = processLine(l);
if (!line) { if (!line) {
@@ -38,14 +34,10 @@ const validateDomainSet = async (filePath) => {
} }
}; };
const validateRuleset = async (filePath) => { const _validateRuleset = async (filePath) => {
const rl = readFileByLine(
path.resolve(__dirname, '../List/non_ip', filePath)
);
console.log(`[${filePath}]`); console.log(`[${filePath}]`);
for await (const l of rl) { for await (const l of readFileByLine(path.resolve(__dirname, '../List/non_ip', filePath))) {
// starts with # // starts with #
const line = processLine(l); const line = processLine(l);
if (!line) { if (!line) {