mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-12 01:00:34 +08:00)
Perf: make build faster

commit 82f10868c1 (parent e58ad2c0ac)
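The change is the same in every file touched: independent awaits that used to run one after another are started together and awaited once via Promise.all, and intermediate readline variables are inlined into their for await...of loops. A minimal sketch of the first pattern follows; it is not the repository's code, and the writeList helper is a hypothetical stand-in for work such as compareAndWriteFile() or fse.ensureDir() in the real build scripts:

// A minimal sketch (not the repository's code) of the refactor applied in
// this commit: independent awaits are started together and awaited once.
const fsp = require('node:fs/promises');
const path = require('node:path');
const os = require('node:os');

// Hypothetical helper standing in for work such as compareAndWriteFile()
// or fse.ensureDir() in the real build scripts.
const writeList = (name, lines) => fsp.writeFile(
  path.join(os.tmpdir(), name),
  `${lines.join('\n')}\n`
);

(async () => {
  // Before: the second write only starts after the first has finished.
  // await writeList('a.conf', ['example.com']);
  // await writeList('b.conf', ['example.org']);

  // After: both writes start immediately and are awaited together, so the
  // total time is roughly that of the slowest task instead of the sum.
  await Promise.all([
    writeList('a.conf', ['example.com']),
    writeList('b.conf', ['example.org'])
  ]);
})();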
@@ -10,11 +10,9 @@ const { minifyRules } = require('./lib/minify-rules');
   console.time('Total Time - build-anti-bogus-domain');
   console.time('* Download bogus-nxdomain-list');

-  const rl = await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf');
-
   /** @type {string[]} */
   const res = [];
-  for await (const line of rl) {
+  for await (const line of await fetchRemoteTextAndCreateReadlineInterface('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf')) {
     if (line.startsWith('bogus-nxdomain=')) {
       res.push(line.replace('bogus-nxdomain=', ''));
     }
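The second recurring change, first visible in the hunk above, drops the intermediate rl variable and iterates the readline interface directly. This works because readline.Interface is async iterable. A minimal sketch follows; the createReadlineFromText helper is an assumed stand-in for the repository's fetchRemoteTextAndCreateReadlineInterface and reads from an in-memory string rather than the network:

// A minimal sketch (assumed, not the repository's helper) of why the
// intermediate `rl` variable can be dropped: readline.Interface is async
// iterable, so the awaited helper result can be consumed directly in a
// for await...of loop.
const readline = require('node:readline');
const { Readable } = require('node:stream');

// Stand-in for fetchRemoteTextAndCreateReadlineInterface(); here the
// "remote text" is just an in-memory string instead of an HTTP response.
const createReadlineFromText = async (text) => readline.createInterface({
  input: Readable.from([text]),
  crlfDelay: Infinity
});

(async () => {
  const text = 'bogus-nxdomain=192.0.2.1\nbogus-nxdomain=192.0.2.2';
  for await (const line of await createReadlineFromText(text)) {
    if (line.startsWith('bogus-nxdomain=')) {
      console.log(line.replace('bogus-nxdomain=', '')); // 192.0.2.1, 192.0.2.2
    }
  }
})();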
@@ -40,22 +40,6 @@ const Trie = require('./lib/trie');
     }
   }

-  await compareAndWriteFile(
-    withBannerArray(
-      'Sukka\'s Surge Rules - CDN Domains',
-      [
-        'License: AGPL 3.0',
-        'Homepage: https://ruleset.skk.moe',
-        'GitHub: https://github.com/SukkaW/Surge',
-        '',
-        'This file contains object storage and static assets CDN domains.'
-      ],
-      new Date(),
-      minifyRules(cdnDomainsList)
-    ),
-    path.resolve(__dirname, '../List/non_ip/cdn.conf')
-  );
-
   /**
    * Dedupe cdn.conf
    */
@@ -71,21 +55,38 @@ const Trie = require('./lib/trie');
     }
   }

-  await compareAndWriteFile(
-    withBannerArray(
-      'Sukka\'s Surge Rules - CDN Domains',
-      [
-        'License: AGPL 3.0',
-        'Homepage: https://ruleset.skk.moe',
-        'GitHub: https://github.com/SukkaW/Surge',
-        '',
-        'This file contains object storage and static assets CDN domains.'
-      ],
-      new Date(),
-      minifyRules(domainDeduper(Array.from(cdnDomains)))
-    ),
-    path.resolve(__dirname, '../List/domainset/cdn.conf')
-  );
+  await Promise.all([
+    compareAndWriteFile(
+      withBannerArray(
+        'Sukka\'s Surge Rules - CDN Domains',
+        [
+          'License: AGPL 3.0',
+          'Homepage: https://ruleset.skk.moe',
+          'GitHub: https://github.com/SukkaW/Surge',
+          '',
+          'This file contains object storage and static assets CDN domains.'
+        ],
+        new Date(),
+        minifyRules(cdnDomainsList)
+      ),
+      path.resolve(__dirname, '../List/non_ip/cdn.conf')
+    ),
+    compareAndWriteFile(
+      withBannerArray(
+        'Sukka\'s Surge Rules - CDN Domains',
+        [
+          'License: AGPL 3.0',
+          'Homepage: https://ruleset.skk.moe',
+          'GitHub: https://github.com/SukkaW/Surge',
+          '',
+          'This file contains object storage and static assets CDN domains.'
+        ],
+        new Date(),
+        minifyRules(domainDeduper(Array.from(cdnDomains)))
+      ),
+      path.resolve(__dirname, '../List/domainset/cdn.conf')
+    )
+  ]);

   console.timeEnd('Total Time - build-cdn-conf');
 })();
@@ -62,17 +62,19 @@ const escapeRegExp = (string) => {
   }
 };

-  await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/cdn.conf'));
-  await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global.conf'));
-  await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global_plus.conf'));
-  await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/my_proxy.conf'));
-  await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/stream.conf'));
-  await processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/telegram.conf'));
-
-  await processLocalDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf'));
-  await processLocalDomainSet(path.resolve(__dirname, '../List/domainset/download.conf'));
-
-  await fse.ensureDir(path.resolve(__dirname, '../List/internal'));
+  await Promise.all([
+    processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/cdn.conf')),
+    processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global.conf')),
+    processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/global_plus.conf')),
+    processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/my_proxy.conf')),
+    processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/stream.conf')),
+    processLocalRuleSet(path.resolve(__dirname, '../List/non_ip/telegram.conf')),
+    processLocalDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf')),
+    processLocalDomainSet(path.resolve(__dirname, '../List/domainset/download.conf')),
+    fse.ensureDir(path.resolve(__dirname, '../List/internal'))
+  ]);

   await fs.promises.writeFile(
     path.resolve(__dirname, '../List/internal/cdn.txt'),
     [
@@ -5,9 +5,11 @@ const fs = require('fs');
 const { parseFelixDnsmasq } = require('./lib/parse-dnsmasq');

 (async () => {
-  const result = await parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf');
-
-  await fse.ensureDir(path.resolve(__dirname, '../List/internal'));
+  const [result] = await Promise.all([
+    parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'),
+    fse.ensureDir(path.resolve(__dirname, '../List/internal'))
+  ]);
+
   await fs.promises.writeFile(
     path.resolve(__dirname, '../List/internal/accelerated-china-domains.txt'),
     `${result.map(line => `SUFFIX,${line}`).join('\n')}\n`
@@ -106,9 +106,7 @@ const domainSuffixSet = new Set();
   let previousSize = domainSets.size;
   console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`);

-  const rl1 = readFileByLine(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'));
-
-  for await (const line of rl1) {
+  for await (const line of readFileByLine(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'))) {
     const l = processLine(line);
     if (l) {
       domainSets.add(l);
@@ -118,8 +116,7 @@ const domainSuffixSet = new Set();
   previousSize = domainSets.size - previousSize;
   console.log(`Import ${previousSize} rules from reject_sukka.conf!`);

-  const rl2 = readFileByLine(pathResolve(__dirname, '../List/non_ip/reject.conf'));
-  for await (const line of rl2) {
+  for await (const line of readFileByLine(pathResolve(__dirname, '../List/non_ip/reject.conf'))) {
     if (line.startsWith('DOMAIN-KEYWORD')) {
       const [, ...keywords] = line.split(',');
       domainKeywordsSet.add(keywords.join(',').trim());
@@ -129,8 +126,7 @@ const domainSuffixSet = new Set();
     }
   }

-  const rl3 = readFileByLine(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'));
-  for await (const line of rl3) {
+  for await (const line of readFileByLine(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'))) {
     const l = processLine(line);
     if (l && l[0] === '.') {
       domainSuffixSet.add(l.slice(1));
@@ -216,42 +212,42 @@ const domainSuffixSet = new Set();
     .sort(sorter)
     .map((i) => i.v);

-  await compareAndWriteFile(
-    withBannerArray(
-      'Sukka\'s Surge Rules - Reject Base',
-      [
-        'License: AGPL 3.0',
-        'Homepage: https://ruleset.skk.moe',
-        'GitHub: https://github.com/SukkaW/Surge',
-        '',
-        'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining',
-        '',
-        'Build from:',
-        ...HOSTS.map(host => ` - ${host[0]}`),
-        ...ADGUARD_FILTERS.map(filter => ` - ${Array.isArray(filter) ? filter[0] : filter}`)
-      ],
-      new Date(),
-      sortedDomainSets
-    ),
-    pathResolve(__dirname, '../List/domainset/reject.conf')
-  );
-
-  await fs.promises.writeFile(
-    pathResolve(__dirname, '../List/internal/reject-stats.txt'),
-    Object.entries(rejectDomainsStats)
-      .sort((a, b) => {
-        const t = b[1] - a[1];
-        if (t === 0) {
-          return a[0].localeCompare(b[0]);
-        }
-        return t;
-      })
-      .map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`)
-      .join('\n')
-  );
-
-  // Copy reject_sukka.conf for backward compatibility
-  await fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf'));
+  await Promise.all([
+    compareAndWriteFile(
+      withBannerArray(
+        'Sukka\'s Surge Rules - Reject Base',
+        [
+          'License: AGPL 3.0',
+          'Homepage: https://ruleset.skk.moe',
+          'GitHub: https://github.com/SukkaW/Surge',
+          '',
+          'The domainset supports AD blocking, tracking protection, privacy protection, anti-phishing, anti-mining',
+          '',
+          'Build from:',
+          ...HOSTS.map(host => ` - ${host[0]}`),
+          ...ADGUARD_FILTERS.map(filter => ` - ${Array.isArray(filter) ? filter[0] : filter}`)
+        ],
+        new Date(),
+        sortedDomainSets
+      ),
+      pathResolve(__dirname, '../List/domainset/reject.conf')
+    ),
+    fs.promises.writeFile(
+      pathResolve(__dirname, '../List/internal/reject-stats.txt'),
+      Object.entries(rejectDomainsStats)
+        .sort((a, b) => {
+          const t = b[1] - a[1];
+          if (t === 0) {
+            return a[0].localeCompare(b[0]);
+          }
+          return t;
+        })
+        .map(([domain, count]) => `${domain}${' '.repeat(100 - domain.length)}${count}`)
+        .join('\n')
+    ),
+    // Copy reject_sukka.conf for backward compatibility
+    fse.copy(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), pathResolve(__dirname, '../List/domainset/reject_sukka.conf'))
+  ]);

   console.timeEnd('* Write reject.conf');
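One behavioral note on batching writes as in the hunk above: Promise.all rejects as soon as any of the batched promises rejects, while the remaining operations still run to completion in the background. A small sketch of that semantics follows; the timings and helper names are made up for illustration and are not from the repository:

// A small sketch (not from the repo) of the error semantics implied by
// batching with Promise.all: one rejection rejects the combined await,
// while the other operations still run to completion in the background.
const delayed = (ms, value) => new Promise((resolve) => setTimeout(() => resolve(value), ms));
const failing = (ms, reason) => new Promise((_, reject) => setTimeout(() => reject(new Error(reason)), ms));

(async () => {
  try {
    await Promise.all([
      delayed(50, 'write reject.conf'),
      failing(10, 'copy failed')
    ]);
  } catch (e) {
    // The build surfaces this error just as it would with sequential
    // awaits; only the ordering of side effects differs.
    console.error(e.message); // 'copy failed'
  }
})();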
@@ -33,9 +33,12 @@ const fileExists = (path) => {
   }

   const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
-  await fse.ensureDir(extractedPath);

-  const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
+  const [resp] = await Promise.all([
+    fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages'),
+    fse.ensureDir(extractedPath)
+  ]);

   await pipeline(
     Readable.fromWeb(resp.body),
     tar.x({
@@ -29,9 +29,7 @@ async function processDomainLists(domainListsUrl) {
   /** @type Set<string> */
   const domainSets = new Set();

-  const rl = await fetchRemoteTextAndCreateReadlineInterface(domainListsUrl);
-
-  for await (const line of rl) {
+  for await (const line of await fetchRemoteTextAndCreateReadlineInterface(domainListsUrl)) {
     if (line.startsWith('!')) {
       continue;
     }
@@ -65,9 +63,8 @@ async function processHosts(hostsUrl, includeAllSubDomain = false) {
   /** @type Set<string> */
   const domainSets = new Set();

-  const rl = await fetchRemoteTextAndCreateReadlineInterface(hostsUrl);
-  for await (const _line of rl) {
-    const line = processLine(_line);
+  for await (const l of await fetchRemoteTextAndCreateReadlineInterface(hostsUrl)) {
+    const line = processLine(l);
     if (!line) {
       continue;
     }
@@ -10,12 +10,10 @@ const { readFileByLine } = require('./fetch-remote-text-by-line');
 async function compareAndWriteFile(linesA, filePath) {
   await fse.ensureFile(filePath);

-  const rl = readFileByLine(filePath);
-
   let isEqual = true;
   let index = 0;

-  for await (const lineB of rl) {
+  for await (const lineB of readFileByLine(filePath)) {
     const lineA = linesA[index];
     index++;
@@ -14,11 +14,7 @@ const SPECIAL_SUFFIXES = new Set([
 ]);

 const validateDomainSet = async (filePath) => {
-  const rl = readFileByLine(
-    path.resolve(__dirname, '../List/domainset', filePath)
-  );
-
-  for await (const l of rl) {
+  for await (const l of readFileByLine(path.resolve(__dirname, '../List/domainset', filePath))) {
     // starts with #
     const line = processLine(l);
     if (!line) {
@@ -38,14 +34,10 @@ const validateDomainSet = async (filePath) => {
   }
 };

-const validateRuleset = async (filePath) => {
-  const rl = readFileByLine(
-    path.resolve(__dirname, '../List/non_ip', filePath)
-  );
-
+const _validateRuleset = async (filePath) => {
   console.log(`[${filePath}]`);

-  for await (const l of rl) {
+  for await (const l of readFileByLine(path.resolve(__dirname, '../List/non_ip', filePath))) {
     // starts with #
     const line = processLine(l);
     if (!line) {