Chore: more refactoring to Bun

This commit is contained in:
SukkaW
2023-11-15 15:20:37 +08:00
parent 37257958c2
commit 071b8120a6
36 changed files with 200 additions and 250 deletions

View File

@@ -1,5 +1,4 @@
// @ts-check
const fs = require('fs');
const { readFileByLine } = require('./fetch-remote-text-by-line');
const { surgeDomainsetToClashDomainset, surgeRulesetToClashClassicalTextRuleset } = require('./clash');
@@ -9,7 +8,9 @@ const { surgeDomainsetToClashDomainset, surgeRulesetToClashClassicalTextRuleset
*/
async function compareAndWriteFile(linesA, filePath) {
let isEqual = true;
if (!fs.existsSync(filePath)) {
const file = Bun.file(filePath);
if (!(await file.exists())) {
console.log(`${filePath} does not exists, writing...`);
isEqual = false;
} else if (linesA.length === 0) {
@@ -44,7 +45,6 @@ async function compareAndWriteFile(linesA, filePath) {
}
if (!isEqual) {
const file = Bun.file(filePath);
const writer = file.writer();
for (let i = 0, len = linesA.length; i < len; i++) {

View File

@@ -1,32 +0,0 @@
// @ts-check
const createTrie = require('./trie');
/**
 * Deduplicate a domain list using a suffix trie.
 *
 * For every wildcard-style entry (one that starts with `.`), removes from
 * the result every domain it already covers: all suffix-matched subdomains
 * and the bare apex domain itself (`.example.com` covers `example.com`).
 *
 * @param {string[]} inputDomains
 * @returns {string[]} surviving domains, in original insertion order
 */
const domainDeduper = (inputDomains) => {
  const trie = createTrie(inputDomains);
  const sets = new Set(inputDomains);

  for (let j = 0, len = inputDomains.length; j < len; j++) {
    const d = inputDomains[j];
    // Only wildcard-style entries (leading dot) can cover other entries.
    if (d[0] !== '.') {
      continue;
    }

    // delete all included subdomains (ends with `.example.com`)
    // eslint-disable-next-line sukka/unicorn/no-array-method-this-argument -- it is not an array
    trie.find(d, false).forEach(f => sets.delete(f));

    // if `.example.com` exists, then `example.com` should also be removed
    const a = d.slice(1);

    if (sets.has(a)) {
      sets.delete(a);
    }
  }

  return Array.from(sets);
};
module.exports.domainDeduper = domainDeduper;

View File

@@ -0,0 +1,25 @@
import createTrie from './trie';
/**
 * Deduplicate a domain list using a suffix trie.
 *
 * For every wildcard-style entry (one that starts with `.`), removes from
 * the result every domain it already covers: all suffix-matched subdomains
 * and the bare apex domain itself (`.example.com` covers `example.com`).
 *
 * @param inputDomains domains to deduplicate
 * @returns surviving domains, in original insertion order
 */
const domainDeduper = (inputDomains: string[]): string[] => {
  const trie = createTrie(inputDomains);
  const sets = new Set(inputDomains);

  for (let j = 0, len = inputDomains.length; j < len; j++) {
    const d = inputDomains[j];
    // Only wildcard-style entries (leading dot) can cover other entries.
    if (d[0] !== '.') {
      continue;
    }

    // delete all included subdomains (ends with `.example.com`)
    trie.find(d, false).forEach(f => sets.delete(f));

    // if `.example.com` exists, then `example.com` should also be removed.
    // Set#delete is a no-op when the key is absent, so no has() guard needed.
    sets.delete(d.slice(1));
  }

  return Array.from(sets);
};

export default domainDeduper;

View File

@@ -1,14 +1,16 @@
// @ts-check
const { fetchWithRetry } = require('./fetch-retry');
import type { BunFile } from 'bun';
import { fetchWithRetry } from './fetch-retry';
const decoder = new TextDecoder('utf-8');
/**
* @param {string} path
*/
module.exports.readFileByLine = async function *(path) {
export async function* readFileByLine(file: string | BunFile): AsyncGenerator<string> {
if (typeof file === 'string') {
file = Bun.file(file);
}
let buf = '';
for await (const chunk of Bun.file(path).stream()) {
for await (const chunk of file.stream()) {
const chunkStr = decoder.decode(chunk).replaceAll('\r\n', '\n');
for (let i = 0, len = chunkStr.length; i < len; i++) {
const char = chunkStr[i];
@@ -24,12 +26,9 @@ module.exports.readFileByLine = async function *(path) {
if (buf) {
yield buf;
}
};
}
/**
* @param {import('undici').Response} resp
*/
const createReadlineInterfaceFromResponse = async function *(resp) {
export async function* createReadlineInterfaceFromResponse(resp: Response): AsyncGenerator<string> {
if (!resp.body) {
throw new Error('Failed to fetch remote text');
}
@@ -55,15 +54,9 @@ const createReadlineInterfaceFromResponse = async function *(resp) {
if (buf) {
yield buf;
}
};
}
module.exports.createReadlineInterfaceFromResponse = createReadlineInterfaceFromResponse;
/**
* @param {import('undici').RequestInfo} url
* @param {import('undici').RequestInit} [opt]
*/
module.exports.fetchRemoteTextAndCreateReadlineInterface = async (url, opt) => {
export async function fetchRemoteTextAndCreateReadlineInterface(url: string | URL, opt?: RequestInit): Promise<AsyncGenerator<string>> {
const resp = await fetchWithRetry(url, opt);
return createReadlineInterfaceFromResponse(resp);
};
}

View File

@@ -1,11 +1,3 @@
// @ts-check
const undici = require('undici');
// Enable HTTP/2 supports
// undici.setGlobalDispatcher(new undici.Agent({
// allowH2: true,
// pipelining: 10
// }));
const fetchWithRetry = /** @type {import('undici').fetch} */(require('@vercel/fetch-retry')(undici.fetch));
const fetchWithRetry = require('@vercel/fetch-retry')(fetch);
module.exports.fetchWithRetry = fetchWithRetry;

View File

@@ -1,5 +1,4 @@
const { toASCII } = require('punycode/');
const fs = require('fs');
const path = require('path');
const { traceAsync } = require('./trace-runner');
@@ -7,15 +6,14 @@ const publicSuffixPath = path.resolve(__dirname, '../../node_modules/.cache/publ
const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => {
const customFetch = async (url) => {
const buf = await fs.promises.readFile(url);
return {
arrayBuffer() { return Promise.resolve(buf.buffer); }
};
return Bun.file(url);
};
const publicSuffixFile = Bun.file(publicSuffixPath);
const [publicSuffixListDat, { default: gorhill }] = await Promise.all([
fs.existsSync(publicSuffixPath)
? fs.promises.readFile(publicSuffixPath, 'utf-8')
await publicSuffixFile.exists()
? publicSuffixFile.text()
: fetch('https://publicsuffix.org/list/public_suffix_list.dat').then(r => {
console.log('public_suffix_list.dat not found, fetch directly from remote.');
return r.text();

View File

@@ -1,17 +1,13 @@
const { fetchRemoteTextAndCreateReadlineInterface } = require('./fetch-remote-text-by-line');
const tldts = require('tldts');
import { fetchRemoteTextAndCreateReadlineInterface } from './fetch-remote-text-by-line';
import tldts from 'tldts';
const isDomainLoose = (domain) => {
const isDomainLoose = (domain: string): boolean => {
const { isIcann, isPrivate, isIp } = tldts.parse(domain);
return !!(!isIp && (isIcann || isPrivate));
};
/**
* @param {string | URL} url
*/
const parseFelixDnsmasq = async (url) => {
/** @type {string[]} */
const res = [];
const parseFelixDnsmasq = async (url: string | URL): Promise<string[]> => {
const res: string[] = [];
for await (const line of await fetchRemoteTextAndCreateReadlineInterface(url)) {
if (line.startsWith('server=/') && line.endsWith('/114.114.114.114')) {
const domain = line.replace('server=/', '').replace('/114.114.114.114', '');
@@ -24,4 +20,4 @@ const parseFelixDnsmasq = async (url) => {
return res;
};
module.exports.parseFelixDnsmasq = parseFelixDnsmasq;
export { parseFelixDnsmasq };

View File

@@ -11,6 +11,13 @@ const DEBUG_DOMAIN_TO_FIND = null; // example.com | null
let foundDebugDomain = false;
const warnOnceUrl = new Set();
/**
*
* @param {string} url
* @param {boolean} isWhite
* @param {...any} message
* @returns
*/
const warnOnce = (url, isWhite, ...message) => {
const key = `${url}${isWhite ? 'white' : 'black'}`;
if (warnOnceUrl.has(key)) {
@@ -153,6 +160,9 @@ async function processFilterRules(filterRulesUrl, fallbackUrls) {
let downloadTime = 0;
const gorhill = await getGorhillPublicSuffixPromise();
/**
* @param {string} line
*/
const lineCb = (line) => {
const result = parse(line, gorhill);
if (result) {

View File

@@ -1,48 +0,0 @@
/* eslint-disable camelcase -- cache index access */
/**
 * If line is commented out or empty, return null.
 * Otherwise, return trimmed line.
 *
 * A line is considered a comment when its first character is `#` or `!`
 * (adblock-style), and is skipped outright when it begins with a space,
 * `\r`, or `\n`.
 *
 * @param {string} line
 */
const processLine = (line) => {
  if (!line) {
    return null;
  }

  // Cache the first character; camelcase is disabled file-wide for this name.
  const line_0 = line[0];

  if (
    line_0 === '#'
    || line_0 === ' '
    || line_0 === '\r'
    || line_0 === '\n'
    || line_0 === '!'
  ) {
    return null;
  }

  const trimmed = line.trim();
  // A whitespace-only line trims to the empty string and is dropped too.
  if (trimmed === '') {
    return null;
  }

  return trimmed;
};
module.exports.processLine = processLine;
/**
 * Drain a readline-style async iterable, collecting every line that
 * survives `processLine` (non-empty, non-comment), in order.
 *
 * @param {import('readline').ReadLine} rl
 * @returns {Promise<string[]>}
 */
module.exports.processLineFromReadline = async (rl) => {
  /** @type {string[]} */
  const collected = [];

  for await (const rawLine of rl) {
    const processed = processLine(rawLine);
    if (processed !== null) {
      collected.push(processed);
    }
  }

  return collected;
};

35
Build/lib/process-line.ts Normal file
View File

@@ -0,0 +1,35 @@
/**
 * If line is commented out or empty, return `null`.
 * Otherwise, return the trimmed line.
 *
 * A line is treated as a comment when its first character is `#` or `!`
 * (adblock-style), and is skipped outright when it begins with a space,
 * `\r`, or `\n`. A whitespace-only line trims to `''` and is dropped too.
 */
export const processLine = (line: string): string | null => {
  if (!line) {
    return null;
  }

  const firstChar = line[0];
  if (
    firstChar === '#'
    || firstChar === ' '
    || firstChar === '\r'
    || firstChar === '\n'
    || firstChar === '!'
  ) {
    return null;
  }

  const trimmed = line.trim();
  return trimmed === '' ? null : trimmed;
};

/**
 * Drain an async line generator, collecting every line that survives
 * `processLine` (non-empty, non-comment), in order.
 */
export const processLineFromReadline = async (rl: AsyncGenerator<string>): Promise<string[]> => {
  const res: string[] = [];

  for await (const line of rl) {
    const l = processLine(line);
    if (l) {
      res.push(l);
    }
  }

  return res;
};

View File

@@ -1,6 +1,4 @@
// @ts-check
/** @type {[string, boolean][]} */
const HOSTS = [
export const HOSTS: [string, boolean][] = [
// ['https://pgl.yoyo.org/adservers/serverlist.php?hostformat=hosts&showintro=0&mimetype=plaintext', false],
['https://raw.githubusercontent.com/hoshsadiq/adblock-nocoin-list/master/hosts.txt', false],
['https://raw.githubusercontent.com/crazy-max/WindowsSpyBlocker/master/data/hosts/spy.txt', false],
@@ -9,7 +7,7 @@ const HOSTS = [
['https://raw.githubusercontent.com/durablenapkin/block/master/luminati.txt', false]
];
const ADGUARD_FILTERS = /** @type {const} */([
export const ADGUARD_FILTERS = [
// EasyList
[
'https://easylist.to/easylist/easylist.txt',
@@ -146,9 +144,9 @@ const ADGUARD_FILTERS = /** @type {const} */([
'https://paulgb.github.io/BarbBlock/blacklists/ublock-origin.txt',
// Brave First Party & First Party CNAME
'https://raw.githubusercontent.com/brave/adblock-lists/master/brave-lists/brave-firstparty.txt'
]);
] as const;
const PREDEFINED_WHITELIST = [
export const PREDEFINED_WHITELIST = [
'localhost',
'broadcasthost',
'ip6-loopback',
@@ -194,11 +192,11 @@ const PREDEFINED_WHITELIST = [
'vlscppe.microsoft.com'
];
const PREDEFINED_ENFORCED_BACKLIST = [
export const PREDEFINED_ENFORCED_BACKLIST = [
'telemetry.mozilla.org'
];
const PREDEFINED_ENFORCED_WHITELIST = [
export const PREDEFINED_ENFORCED_WHITELIST = [
'godaddysites.com',
'web.app',
'firebaseapp.com',
@@ -219,9 +217,3 @@ const PREDEFINED_ENFORCED_WHITELIST = [
'blogspot.com',
'appspot.com'
];
module.exports.HOSTS = HOSTS;
module.exports.ADGUARD_FILTERS = ADGUARD_FILTERS;
module.exports.PREDEFINED_WHITELIST = PREDEFINED_WHITELIST;
module.exports.PREDEFINED_ENFORCED_BACKLIST = PREDEFINED_ENFORCED_BACKLIST;
module.exports.PREDEFINED_ENFORCED_WHITELIST = PREDEFINED_ENFORCED_WHITELIST;

View File

@@ -2,23 +2,23 @@
* Suffix Trie based on Mnemonist Trie
*/
const SENTINEL = String.fromCodePoint(0);
export const SENTINEL: string = String.fromCodePoint(0);
/**
* @param {string[] | Set<string>} [from]
*/
const createTrie = (from) => {
let size = 0;
const root = {};
export const createTrie = (from?: string[] | Set<string>) => {
let size: number = 0;
const root: any = {};
/**
* Method used to add the given prefix to the trie.
*
* @param {string} suffix - Prefix to follow.
*/
const add = (suffix) => {
let node = root;
let token;
const add = (suffix: string): void => {
let node: any = root;
let token: string;
for (let i = suffix.length - 1; i >= 0; i--) {
token = suffix[i];
node[token] ||= {};
@@ -35,9 +35,9 @@ const createTrie = (from) => {
/**
* @param {string} suffix
*/
const contains = (suffix) => {
let node = root;
let token;
const contains = (suffix: string): boolean => {
let node: any = root;
let token: string;
for (let i = suffix.length - 1; i >= 0; i--) {
token = suffix[i];
@@ -56,10 +56,10 @@ const createTrie = (from) => {
* @param {boolean} [includeEqualWithSuffix]
* @return {string[]}
*/
const find = (suffix, includeEqualWithSuffix = true) => {
let node = root;
const matches = [];
let token;
const find = (suffix: string, includeEqualWithSuffix: boolean = true): string[] => {
let node: any = root;
const matches: string[] = [];
let token: string;
for (let i = suffix.length - 1; i >= 0; i--) {
token = suffix[i];
@@ -70,15 +70,15 @@ const createTrie = (from) => {
}
// Performing DFS from prefix
const nodeStack = [node];
const nodeStack: any[] = [node];
const suffixStack = [suffix];
let k;
const suffixStack: string[] = [suffix];
let k: string;
let $suffix = suffix;
let $suffix: string = suffix;
while (nodeStack.length) {
$suffix = suffixStack.pop();
$suffix = suffixStack.pop()!;
node = nodeStack.pop();
// eslint-disable-next-line guard-for-in -- plain object
@@ -105,12 +105,12 @@ const createTrie = (from) => {
* @param {string} suffix - Prefix to delete.
* @return {boolean}
*/
const remove = (suffix) => {
let node = root;
let toPrune = null;
let tokenToPrune = null;
let parent;
let token;
const remove = (suffix: string): boolean => {
let node: any = root;
let toPrune: any = null;
let tokenToPrune: string | null = null;
let parent: any;
let token: string;
for (let i = suffix.length - 1; i >= 0; i--) {
token = suffix[i];
@@ -138,7 +138,7 @@ const createTrie = (from) => {
size--;
if (toPrune) {
if (tokenToPrune) {
delete toPrune[tokenToPrune];
} else {
delete node[SENTINEL];
@@ -153,8 +153,8 @@ const createTrie = (from) => {
* @param {string} suffix - Prefix to check.
* @return {boolean}
*/
const has = (suffix) => {
let node = root;
const has = (suffix: string): boolean => {
let node: any = root;
for (let i = suffix.length - 1; i >= 0; i--) {
node = node[suffix[i]];
@@ -445,8 +445,4 @@ const createTrie = (from) => {
// };
// }
/**
* Exporting.
*/
module.exports.SENTINEL = SENTINEL;
module.exports = createTrie;
export default createTrie;