Make CI Faster / Readline util

This commit is contained in:
SukkaW 2023-07-26 23:14:41 +08:00
parent 41b10b4c38
commit 6da06d8506
6 changed files with 42 additions and 56 deletions

View File

@ -6,9 +6,7 @@ const { withBannerArray } = require('./lib/with-banner');
const { minifyRules } = require('./lib/minify-rules');
const { domainDeduper } = require('./lib/domain-deduper');
const { processLine } = require('./lib/process-line');
const { fetchRemoteTextAndCreateReadlineInterface } = require('./lib/fetch-remote-text-by-line');
const readline = require('readline');
const { fetchRemoteTextAndCreateReadlineInterface, readFileByLine } = require('./lib/fetch-remote-text-by-line');
(async () => {
console.time('Total Time - build-cdn-conf');
@ -64,10 +62,9 @@ const readline = require('readline');
/** @type {Set<string>} */
const cdnDomains = new Set();
for await (const line of readline.createInterface({
input: fs.createReadStream(path.resolve(__dirname, '../Source/domainset/cdn.conf'), 'utf-8'),
crlfDelay: Infinity
})) {
for await (const line of readFileByLine(
path.resolve(__dirname, '../Source/domainset/cdn.conf')
)) {
const l = processLine(line);
if (l) {
cdnDomains.add(l);

View File

@ -7,7 +7,6 @@ const { processLine } = require('./lib/process-line');
(async () => {
console.time('Total Time - build-chnroutes-cidr');
// eslint-disable-next-line node/no-unsupported-features/es-syntax -- dynamic import() is required to load this ESM-only package from CommonJS
const { merge: mergeCidrs } = await import('cidr-tools-wasm');
/** @type {Set<string>} */

View File

@ -2,10 +2,10 @@
const fs = require('fs');
const fse = require('fs-extra');
const path = require('path');
const readline = require('readline');
const { isDomainLoose } = require('./lib/is-domain-loose');
const tldts = require('tldts');
const { processLine } = require('./lib/process-line');
const { readFileByLine } = require('./lib/fetch-remote-text-by-line');
/**
* @param {string} string
@ -32,12 +32,7 @@ const escapeRegExp = (string) => {
* @param {string} domainSetPath
*/
const processLocalDomainSet = async (domainSetPath) => {
for await (
const line of readline.createInterface({
input: fs.createReadStream(domainSetPath),
crlfDelay: Infinity
})
) {
for await (const line of readFileByLine(domainSetPath)) {
if (line[0] === '.') {
addApexDomain(line.slice(1));
} else if (isDomainLoose(line)) {
@ -52,12 +47,7 @@ const escapeRegExp = (string) => {
* @param {string} ruleSetPath
*/
const processLocalRuleSet = async (ruleSetPath) => {
for await (
const line of readline.createInterface({
input: fs.createReadStream(ruleSetPath),
crlfDelay: Infinity
})
) {
for await (const line of readFileByLine(ruleSetPath)) {
if (line.startsWith('DOMAIN-SUFFIX,')) {
addApexDomain(line.replace('DOMAIN-SUFFIX,', ''));
} else if (line.startsWith('DOMAIN,')) {

View File

@ -1,7 +1,6 @@
// @ts-check
const fs = require('fs');
const fse = require('fs-extra');
const readline = require('readline');
const { resolve: pathResolve } = require('path');
const { processHosts, processFilterRules } = require('./lib/parse-filter');
@ -14,6 +13,7 @@ const { compareAndWriteFile } = require('./lib/string-array-compare');
const { processLine } = require('./lib/process-line');
const { domainDeduper } = require('./lib/domain-deduper');
const createKeywordFilter = require('./lib/aho-corasick');
const { readFileByLine } = require('./lib/fetch-remote-text-by-line');
/** Whitelists */
const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
@ -106,10 +106,7 @@ const domainSuffixSet = new Set();
let previousSize = domainSets.size;
console.log(`Import ${previousSize} rules from Hosts / AdBlock Filter Rules!`);
const rl1 = readline.createInterface({
input: fs.createReadStream(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'), { encoding: 'utf-8' }),
crlfDelay: Infinity
});
const rl1 = readFileByLine(pathResolve(__dirname, '../Source/domainset/reject_sukka.conf'));
for await (const line of rl1) {
const l = processLine(line);
@ -121,10 +118,7 @@ const domainSuffixSet = new Set();
previousSize = domainSets.size - previousSize;
console.log(`Import ${previousSize} rules from reject_sukka.conf!`);
const rl2 = readline.createInterface({
input: fs.createReadStream(pathResolve(__dirname, '../List/non_ip/reject.conf'), { encoding: 'utf-8' }),
crlfDelay: Infinity
});
const rl2 = readFileByLine(pathResolve(__dirname, '../List/non_ip/reject.conf'));
for await (const line of rl2) {
if (line.startsWith('DOMAIN-KEYWORD')) {
const [, ...keywords] = line.split(',');
@ -135,10 +129,7 @@ const domainSuffixSet = new Set();
}
}
const rl3 = readline.createInterface({
input: fs.createReadStream(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'), { encoding: 'utf-8' }),
crlfDelay: Infinity
});
const rl3 = readFileByLine(pathResolve(__dirname, '../List/domainset/reject_phishing.conf'));
for await (const line of rl3) {
const l = processLine(line);
if (l && l[0] === '.') {

View File

@ -6,6 +6,7 @@ const { join, resolve } = require('path');
const { tmpdir } = require('os');
const { Readable } = require('stream');
const { pipeline } = require('stream/promises');
const { readFileByLine } = require('./lib/fetch-remote-text-by-line');
const fileExists = (path) => {
return fs.promises.access(path, fs.constants.F_OK)
@ -14,9 +15,13 @@ const fileExists = (path) => {
};
(async () => {
const filesList = (await fs.promises.readFile(resolve(__dirname, '../.gitignore'), { encoding: 'utf-8' }))
.split('\n')
.filter(p => p.startsWith('List/') && !p.endsWith('/'));
const filesList = [];
for await (const line of readFileByLine(resolve(__dirname, '../.gitignore'))) {
if (line.startsWith('List/') && !line.endsWith('/')) {
filesList.push(line);
}
}
if (
!((await Promise.all(
@ -27,32 +32,25 @@ const fileExists = (path) => {
return;
}
const tempFile = join(tmpdir(), `sukka-surge-last-build-tar-${Date.now()}`);
const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
const readableNodeStream = Readable.fromWeb(resp.body);
await pipeline(
readableNodeStream,
fs.createWriteStream(tempFile)
);
const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
await fse.ensureDir(extractedPath);
await tar.x({
file: tempFile,
const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
await pipeline(
Readable.fromWeb(resp.body),
tar.x({
cwd: extractedPath,
filter: (p) => {
filter(p) {
return p.split('/')[1] === 'List';
}
});
})
);
await Promise.all(filesList.map(p => fse.copy(
join(extractedPath, 'Surge-gh-pages', p),
join(__dirname, '..', p),
{
overwrite: true
}
{ overwrite: true }
)));
await fs.promises.unlink(tempFile).catch(() => { });
await fs.promises.unlink(extractedPath).catch(() => { });
})();

View File

@ -1,8 +1,19 @@
// @ts-check
const fs = require('fs');
const { fetchWithRetry } = require('./fetch-retry');
const readline = require('readline');
const { Readable } = require('stream');
/**
 * Open a local file and expose it as an async-iterable readline interface,
 * yielding the file's contents one line at a time.
 *
 * @param {string} path - Filesystem path of the file to read.
 */
module.exports.readFileByLine = (filePath) => {
  // Stream the file as UTF-8 text; crlfDelay: Infinity treats \r\n as a
  // single line break so Windows line endings never produce empty lines.
  const input = fs.createReadStream(filePath, { encoding: 'utf-8' });
  return readline.createInterface({ input, crlfDelay: Infinity });
};
/**
* @param {import('undici').Response} resp
*/