Make CI Faster / Readline util

SukkaW 2023-07-26 23:14:41 +08:00
parent 41b10b4c38
commit 6da06d8506
6 changed files with 42 additions and 56 deletions

View File

@@ -6,9 +6,7 @@ const { withBannerArray } = require('./lib/with-banner');
 const { minifyRules } = require('./lib/minify-rules');
 const { domainDeduper } = require('./lib/domain-deduper');
 const { processLine } = require('./lib/process-line');
-const { fetchRemoteTextAndCreateReadlineInterface } = require('./lib/fetch-remote-text-by-line');
-const readline = require('readline');
+const { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } = require('./lib/fetch-remote-text-by-line');

 (async () => {
   console.time('Total Time - build-cdn-conf');
@@ -64,10 +62,9 @@ const readline = require('readline');
   /** @type {Set<string>} */
   const cdnDomains = new Set();

-  for await (const line of readline.createInterface({
-    input: fs.createReadStream(path.resolve(__dirname, '../Source/domainset/cdn.conf'), 'utf-8'),
-    crlfDelay: Infinity
-  })) {
+  for await (const line of readFileByLine(
+    path.resolve(__dirname, '../Source/domainset/cdn.conf')
+  )) {
     const l = processLine(line);
     if (l) {
       cdnDomains.add(l);

View File

@@ -7,7 +7,6 @@ const { processLine } = require('./lib/process-line');
 (async () => {
   console.time('Total Time - build-chnroutes-cidr');
-  // eslint-disable-next-line node/no-unsupported-features/es-syntax -- fuck
   const { merge: mergeCidrs } = await import('cidr-tools-wasm');

   /** @type {Set<string>} */

View File

@@ -2,10 +2,10 @@
 const fs = require('fs');
 const fse = require('fs-extra');
 const path = require('path');
-const readline = require('readline');
 const { isDomainLoose } = require('./lib/is-domain-loose');
 const tldts = require('tldts');
 const { processLine } = require('./lib/process-line');
+const { readFileByLine } = require('./lib/fetch-remote-text-by-line');

 /**
  * @param {string} string
@@ -32,12 +32,7 @@ const escapeRegExp = (string) => {
  * @param {string} domainSetPath
  */
 const processLocalDomainSet = async (domainSetPath) => {
-  for await (
-    const line of readline.createInterface({
-      input: fs.createReadStream(domainSetPath),
-      crlfDelay: Infinity
-    })
-  ) {
+  for await (const line of readFileByLine(domainSetPath)) {
     if (line[0] === '.') {
       addApexDomain(line.slice(1));
     } else if (isDomainLoose(line)) {
@@ -52,12 +47,7 @@ const escapeRegExp = (string) => {
  * @param {string} ruleSetPath
  */
 const processLocalRuleSet = async (ruleSetPath) => {
-  for await (
-    const line of readline.createInterface({
-      input: fs.createReadStream(ruleSetPath),
-      crlfDelay: Infinity
-    })
-  ) {
+  for await (const line of readFileByLine(ruleSetPath)) {
     if (line.startsWith('DOMAIN-SUFFIX,')) {
       addApexDomain(line.replace('DOMAIN-SUFFIX,', ''));
     } else if (line.startsWith('DOMAIN,')) {

View File

@@ -1,7 +1,6 @@
 // @ts-check
 const fs = require('fs');
 const fse = require('fs-extra');
-const readline = require('readline');
 const { resolve: pathResolve } = require('path');
 const { processHosts, processFilterRules } = require('./lib/parse-filter');
@@ -14,6 +13,7 @@ const { compareAndWriteFile } = require('./lib/string-array-compare');
 const { processLine } = require('./lib/process-line');
 const { domainDeduper } = require('./lib/domain-deduper');
 const createKeywordFilter = require('./lib/aho-corasick');
+const { readFileByLine } = require('./lib/fetch-remote-text-by-line');

 /** Whitelists */
 const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
@@ -106,10 +106,7 @@ const domainSuffixSet = new Set();
   let previousSize = domainSets.size;
   console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`);

-  const rl1 = readline.createInterface({
-    input: fs.createReadStream(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), { encoding: 'utf-8' }),
-    crlfDelay: Infinity
-  });
+  const rl1 = readFileByLine(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'));

   for await (const line of rl1) {
     const l = processLine(line);
@@ -121,10 +118,7 @@ const domainSuffixSet = new Set();
   previousSize = domainSets.size - previousSize;
   console.log(`Import ${previousSize} rules from reject_sukka.conf!`);

-  const rl2 = readline.createInterface({
-    input: fs.createReadStream(pathResolve(__dirname, '../List/non_ip/reject.conf'), { encoding: 'utf-8' }),
-    crlfDelay: Infinity
-  });
+  const rl2 = readFileByLine(pathResolve(__dirname, '../List/non_ip/reject.conf'));
   for await (const line of rl2) {
     if (line.startsWith('DOMAIN-KEYWORD')) {
       const [, ...keywords] = line.split(',');
@@ -135,10 +129,7 @@ const domainSuffixSet = new Set();
     }
   }

-  const rl3 = readline.createInterface({
-    input: fs.createReadStream(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'), { encoding: 'utf-8' }),
-    crlfDelay: Infinity
-  });
+  const rl3 = readFileByLine(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'));
   for await (const line of rl3) {
     const l = processLine(line);
     if (l && l[0] === '.') {

View File

@@ -6,6 +6,7 @@ const { join, resolve } = require('path');
 const { tmpdir } = require('os');
 const { Readable } = require('stream');
 const { pipeline } = require('stream/promises');
+const { readFileByLine } = require('./lib/fetch-remote-text-by-line');

 const fileExists = (path) => {
   return fs.promises.access(path, fs.constants.F_OK)
@@ -14,9 +15,13 @@ const fileExists = (path) => {
 };

 (async () => {
-  const filesList = (await fs.promises.readFile(resolve(__dirname, '../.gitignore'), { encoding: 'utf-8' }))
-    .split('\n')
-    .filter(p => p.startsWith('List/') && !p.endsWith('/'));
+  const filesList = [];
+
+  for await (const line of readFileByLine(resolve(__dirname, '../.gitignore'))) {
+    if (line.startsWith('List/') && !line.endsWith('/')) {
+      filesList.push(line);
+    }
+  }

   if (
     !((await Promise.all(
@@ -27,32 +32,25 @@ const fileExists = (path) => {
     return;
   }

-  const tempFile = join(tmpdir(), `sukka-surge-last-build-tar-${Date.now()}`);
-  const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
-  const readableNodeStream = Readable.fromWeb(resp.body);
-  await pipeline(
-    readableNodeStream,
-    fs.createWriteStream(tempFile)
-  );
-
   const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
   await fse.ensureDir(extractedPath);
-  await tar.x({
-    file: tempFile,
-    cwd: extractedPath,
-    filter: (p) => {
-      return p.split('/')[1] === 'List';
-    }
-  });
+
+  const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
+  await pipeline(
+    Readable.fromWeb(resp.body),
+    tar.x({
+      cwd: extractedPath,
+      filter(p) {
+        return p.split('/')[1] === 'List';
+      }
+    })
+  );

   await Promise.all(filesList.map(p => fse.copy(
     join(extractedPath, 'Surge-gh-pages', p),
     join(__dirname, '..', p),
-    {
-      overwrite: true
-    }
+    { overwrite: true }
   )));

-  await fs.promises.unlink(tempFile).catch(() => { });
   await fs.promises.unlink(extractedPath).catch(() => { });
 })();
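
Note on the CI speedup in this file: instead of downloading the gh-pages tarball to a temp file and extracting it in a second pass, the rewritten step pipes the response body straight into tar extraction, so extraction overlaps the download and no temp tarball ever touches disk. A minimal sketch of that pattern, separate from the actual script (the downloadAndExtract name, url, and destDir parameters are placeholders, not part of the repo):

// Sketch only: stream an HTTP tarball directly into tar extraction, no temp file.
const { Readable } = require('stream');
const { pipeline } = require('stream/promises');
const tar = require('tar');

const downloadAndExtract = async (url, destDir) => {
  const resp = await fetch(url); // global fetch (Node 18+), as in the script above
  await pipeline(
    Readable.fromWeb(resp.body), // web ReadableStream -> Node readable stream
    tar.x({ cwd: destDir })      // node-tar's extract stream consumes bytes as they arrive
  );
};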

View File

@@ -1,8 +1,19 @@
 // @ts-check
+const fs = require('fs');
 const { fetchWithRetry } = require('./fetch-retry');
 const readline = require('readline');
 const { Readable } = require('stream');

+/**
+ * @param {string} path
+ */
+module.exports.readFileByLine = (path) => {
+  return readline.createInterface({
+    input: fs.createReadStream(path, { encoding: 'utf-8' }),
+    crlfDelay: Infinity
+  });
+};
+
 /**
  * @param {import('undici').Response} resp
  */
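
For reference, the new readFileByLine helper returns a readline.Interface, which is async-iterable, so callers simply for await over it, as the build scripts above now do. A minimal consumer sketch under the same assumptions ('some-domainset.conf' is a placeholder path, the require paths assume the sketch sits alongside the build scripts, and processLine is the existing helper that, judging by its usage above, returns a falsy value for lines the build should skip):

// Sketch of a readFileByLine consumer; the file path is a placeholder.
const { readFileByLine } = require('./lib/fetch-remote-text-by-line');
const { processLine } = require('./lib/process-line');

(async () => {
  /** @type {Set<string>} */
  const domains = new Set();

  for await (const line of readFileByLine('some-domainset.conf')) {
    const l = processLine(line); // falsy for lines to ignore (per its usage in the build scripts)
    if (l) {
      domains.add(l);
    }
  }

  console.log(`${domains.size} domains read`);
})();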