Mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-12 01:00:34 +08:00)
Make ESLint Happy

parent 34ef0e58ff
commit d4ff4c5b2d
@@ -18,7 +18,7 @@ const outputSurgeDir = path.resolve(import.meta.dir, '../List');
 const outputClashDir = path.resolve(import.meta.dir, '../Clash');

 export const buildCommon = task(import.meta.path, async () => {
-  const promises: Promise<unknown>[] = [];
+  const promises: Array<Promise<unknown>> = [];

   const pw = new PathScurry(sourceDir);
   for await (const entry of pw) {
@@ -57,7 +57,6 @@ const processFile = async (sourcePath: string) => {
   let title = '';
   const descriptions: string[] = [];

-
   try {
     for await (const line of readFileByLine(sourcePath)) {
       if (line === MAGIC_COMMAND_SKIP) {
@@ -11,15 +11,10 @@ export const buildDomesticRuleset = task(import.meta.path, async () => {
   const results = await processLineFromReadline(readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/domestic.conf')));

   results.push(
-    ...Object.entries(DOMESTICS)
-      .reduce<string[]>(
-        (acc, [key, { domains }]) => {
+    ...Object.entries(DOMESTICS).reduce<string[]>((acc, [key, { domains }]) => {
       if (key === 'SYSTEM') return acc;
       return [...acc, ...domains];
-        },
-        []
-      )
-      .map((domain) => `DOMAIN-SUFFIX,${domain}`)
+    }, []).map((domain) => `DOMAIN-SUFFIX,${domain}`)
   );

   const rulesetDescription = [
@@ -1,5 +1,4 @@
-// @ts-check
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import path from 'path';
 import * as tldts from 'tldts';
 import { processLine } from './lib/process-line';
@@ -14,7 +13,7 @@ const escapeRegExp = (string = '') => string.replaceAll(/[$()*+.?[\\\]^{|}]/g, '

 export const buildInternalCDNDomains = task(import.meta.path, async () => {
   const set = new Set<string>();
-  const keywords = new Set();
+  const keywords = new Set<string>();

   const gorhill = await getGorhillPublicSuffixPromise();
   const domainSorter = createDomainSorter(gorhill);
@@ -1,5 +1,5 @@
 import path from 'path';
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
 import { task } from './lib/trace-runner';
 import { compareAndWriteFile } from './lib/create-file';
@@ -1,7 +1,7 @@
 import { fetchRemoteTextAndCreateReadlineInterface } from './lib/fetch-remote-text-by-line';
 import { processLineFromReadline } from './lib/process-line';
 import path from 'path';
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import { task } from './lib/trace-runner';

 const RESERVED_IPV4_CIDR = [
@@ -1,7 +1,7 @@
 const fsPromises = require('fs').promises;
 const pathFn = require('path');
 const table = require('table');
-import listDir from '@sukka/listdir';
+const listDir = require('@sukka/listdir');
 const { green, yellow } = require('picocolors');

 const PRESET_MITM_HOSTNAMES = [
@@ -1,10 +1,10 @@
-import { processFilterRules, processHosts } from './lib/parse-filter';
+import { processHosts } from './lib/parse-filter';
 import path from 'path';
 import { createRuleset } from './lib/create-file';
 import { processLine } from './lib/process-line';
 import { createDomainSorter } from './lib/stable-sort-domain';
 import { traceSync, task } from './lib/trace-runner';
-import createTrie from './lib/trie';
+import { createTrie } from './lib/trie';
 import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
 import { createCachedGorhillGetDomain } from './lib/cached-tld-parse';
 import * as tldts from 'tldts';
@@ -1,6 +1,6 @@
 import listDir from '@sukka/listdir';
 import path from 'path';
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import { task } from './lib/trace-runner';

 const rootPath = path.resolve(import.meta.dir, '../');
@@ -1,9 +1,9 @@
 // @ts-check
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import path from 'path';

 import { processHosts, processFilterRules } from './lib/parse-filter';
-import createTrie from './lib/trie';
+import { createTrie } from './lib/trie';

 import { HOSTS, ADGUARD_FILTERS, PREDEFINED_WHITELIST, PREDEFINED_ENFORCED_BACKLIST } from './lib/reject-data-source';
 import { createRuleset, compareAndWriteFile } from './lib/create-file';
@@ -20,13 +20,13 @@ import { SHARED_DESCRIPTION } from './lib/constants';
 /** Whitelists */
 const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
 /** @type {Set<string>} Dedupe domains inclued by DOMAIN-KEYWORD */
-const domainKeywordsSet: Set<string> = new Set();
+const domainKeywordsSet = new Set<string>();
 /** @type {Set<string>} Dedupe domains included by DOMAIN-SUFFIX */
-const domainSuffixSet: Set<string> = new Set();
+const domainSuffixSet = new Set<string>();

 export const buildRejectDomainSet = task(import.meta.path, async () => {
   /** @type Set<string> */
-  const domainSets: Set<string> = new Set();
+  const domainSets = new Set<string>();

   // Parse from AdGuard Filters
   console.time('* Download and process Hosts / AdBlock Filter Rules');
@@ -91,7 +91,6 @@ export const buildRejectDomainSet = task(import.meta.path, async () => {
   console.timeEnd('* Download and process Hosts / AdBlock Filter Rules');

   if (shouldStop) {
-    // eslint-disable-next-line n/no-process-exit -- force stop
     process.exit(1);
   }
@@ -173,7 +172,7 @@ export const buildRejectDomainSet = task(import.meta.path, async () => {
   console.log(`Deduped ${previousSize - dudupedDominArray.length} rules!`);

   // Create reject stats
-  const rejectDomainsStats: [string, number][] = traceSync(
+  const rejectDomainsStats: Array<[string, number]> = traceSync(
     '* Collect reject domain stats',
     () => Object.entries(
       dudupedDominArray.reduce<Record<string, number>>((acc, cur) => {
@@ -14,7 +14,7 @@ const s = new Sema(3);
 const latestTopUserAgentsPromise = fetchWithRetry('https://unpkg.com/top-user-agents@latest/index.json')
   .then(res => res.json() as Promise<string[]>);

-const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> => {
+const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>> => {
   const [topUserAgents] = await Promise.all([
     latestTopUserAgentsPromise,
     s.acquire()
@@ -42,10 +42,10 @@ const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> =>
     }
   });
   if (!res.ok) {
-    throw new Error(res.statusText + '\n' + await res.text());
+    throw new Error(`${res.statusText}\n${await res.text()}`);
   }

-  const json = await res.json() as { url: string; }[];
+  const json = await res.json() as Array<{ url: string }>;
   s.release();

   console.timeEnd(key);
@@ -60,7 +60,7 @@ const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> =>

 export const buildSpeedtestDomainSet = task(import.meta.path, async () => {
   /** @type {Set<string>} */
-  const domains: Set<string> = new Set([
+  const domains = new Set<string>([
     '.speedtest.net',
     '.speedtestcustom.com',
     '.ooklaserver.net',
@@ -7,7 +7,7 @@ import { createRuleset } from './lib/create-file';
 import { ALL, NORTH_AMERICA, EU, HK, TW, JP, KR } from '../Source/stream';
 import { SHARED_DESCRIPTION } from './lib/constants';

-const createRulesetForStreamService = (fileId: string, title: string, streamServices: import('../Source/stream').StreamService[]) => {
+const createRulesetForStreamService = (fileId: string, title: string, streamServices: Array<import('../Source/stream').StreamService>) => {
   return [
     // Domains
     ...createRuleset(
@@ -15,10 +15,10 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
       [
         ...SHARED_DESCRIPTION,
         '',
-        ...streamServices.map((i: { name: any; }) => `- ${i.name}`)
+        ...streamServices.map((i) => `- ${i.name}`)
       ],
       new Date(),
-      streamServices.flatMap((i: { rules: any; }) => i.rules),
+      streamServices.flatMap((i) => i.rules),
       'ruleset',
       path.resolve(import.meta.dir, `../List/non_ip/${fileId}.conf`),
       path.resolve(import.meta.dir, `../Clash/non_ip/${fileId}.txt`)
@@ -29,14 +29,14 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
       [
         ...SHARED_DESCRIPTION,
         '',
-        ...streamServices.map((i: { name: any; }) => `- ${i.name}`)
+        ...streamServices.map((i) => `- ${i.name}`)
       ],
       new Date(),
       streamServices.flatMap((i) => (
         i.ip
           ? [
-            ...i.ip.v4.map((ip: any) => `IP-CIDR,${ip},no-resolve`),
-            ...i.ip.v6.map((ip: any) => `IP-CIDR6,${ip},no-resolve`)
+            ...i.ip.v4.map((ip) => `IP-CIDR,${ip},no-resolve`),
+            ...i.ip.v6.map((ip) => `IP-CIDR6,${ip},no-resolve`)
           ]
           : []
       )),
@@ -71,7 +71,7 @@ export const downloadPreviousBuild = task(import.meta.path, async () => {
       return;
     }

-    const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master' + path.sep, '');
+    const relativeEntryPath = entry.path.replace(`ruleset.skk.moe-master${path.sep}`, '');

     const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);
     await fsp.mkdir(path.dirname(targetPath), { recursive: true });
@@ -105,7 +105,7 @@ export const downloadPublicSuffixList = task(import.meta.path, async () => {
     fsp.mkdir(publicSuffixDir, { recursive: true })
   ]);

-  return Bun.write(publicSuffixPath, resp);
+  return Bun.write(publicSuffixPath, resp as Response);
 }, 'download-publicsuffixlist');

 if (import.meta.main) {
@@ -98,7 +98,7 @@ import { buildPublicHtml } from './build-public';
     printStats(stats);
   } catch (e) {
     console.error(e);
-    console.error('Something went wrong!')
+    console.error('Something went wrong!');
   }
 })();
@@ -1,12 +1,12 @@
 interface Node {
   /** @default 0 */
-  depth?: number;
-  key: string;
+  depth?: number,
+  key: string,
   /** @default false */
-  word?: boolean;
-  children: Record<string, Node>;
-  fail?: Node;
-  count: number;
+  word?: boolean,
+  children: Record<string, Node>,
+  fail?: Node,
+  count: number
 }

 const createNode = (key: string, depth = 0): Node => ({
@@ -31,15 +31,15 @@ const createKeywordFilter = (keys: string[] | Set<string>) => {
     const map = beginNode.children;
     // eslint-disable-next-line guard-for-in -- plain object
     for (const key in beginNode.children) {
-      const node = map?.[key];
+      const node = map[key];
       let failNode = beginNode.fail;

-      while (failNode && !failNode.children?.[key]) {
+      while (failNode && !failNode.children[key]) {
        failNode = failNode.fail;
       }

       if (node) {
-        node.fail = failNode?.children?.[key] || root;
+        node.fail = failNode?.children[key] || root;

         queue.push(node);
       }
@@ -86,8 +86,8 @@ const createKeywordFilter = (keys: string[] | Set<string>) => {
     // const key = text.charAt(i);
     const key = text[i];

-    while (node && !node?.children[key]) {
-      node = node?.fail;
+    while (node && !node.children[key]) {
+      node = node.fail;
     }
     node = node?.children[key] || root;
@@ -1,6 +1,6 @@
-import tldts from 'tldts';
+import * as tldts from 'tldts';
 import { createCache } from './cache-apply';
-import { PublicSuffixList } from 'gorhill-publicsuffixlist';
+import type { PublicSuffixList } from 'gorhill-publicsuffixlist';

 const cache = createCache('cached-tld-parse', true);
@@ -12,6 +12,6 @@ let gothillGetDomainCache: ReturnType<typeof createCache> | null = null;
 export const createCachedGorhillGetDomain = (gorhill: PublicSuffixList) => {
   return (domain: string) => {
     gothillGetDomainCache ??= createCache('cached-gorhill-get-domain', true);
-    return gothillGetDomainCache.sync(domain, () => gorhill.getDomain(domain[0] === '.' ? domain.slice(1) : domain))
+    return gothillGetDomainCache.sync(domain, () => gorhill.getDomain(domain[0] === '.' ? domain.slice(1) : domain));
   };
 };
@@ -1,5 +1,5 @@
 export const SHARED_DESCRIPTION = [
   'License: AGPL 3.0',
   'Homepage: https://ruleset.skk.moe',
-  'GitHub: https://github.com/SukkaW/Surge',
+  'GitHub: https://github.com/SukkaW/Surge'
 ] as const;
@@ -85,7 +85,7 @@ export const createRuleset = (
       _clashContent = surgeRulesetToClashClassicalTextRuleset(content);
       break;
     default:
-      throw new TypeError(`Unknown type: ${type}`);
+      throw new TypeError(`Unknown type: ${type as any}`);
   }

   const clashContent = withBannerArray(title, description, date, _clashContent);
@@ -1,4 +1,4 @@
-import createTrie from './trie';
+import { createTrie } from './trie';

 export const domainDeduper = (inputDomains: string[]): string[] => {
   const trie = createTrie(inputDomains);
@@ -9,11 +9,32 @@ const FACTOR = 6;
 function isClientError(err: any): err is NodeJS.ErrnoException {
   if (!err) return false;
   return (
-    err.code === 'ERR_UNESCAPED_CHARACTERS' ||
-    err.message === 'Request path contains unescaped characters'
+    err.code === 'ERR_UNESCAPED_CHARACTERS'
+    || err.message === 'Request path contains unescaped characters'
   );
 }

+export class ResponseError extends Error {
+  readonly res: Response;
+  readonly code: number;
+  readonly statusCode: number;
+  readonly url: string;
+
+  constructor(res: Response) {
+    super(res.statusText);
+
+    if ('captureStackTrace' in Error) {
+      Error.captureStackTrace(this, ResponseError);
+    }
+
+    this.name = this.constructor.name;
+    this.res = res;
+    this.code = res.status;
+    this.statusCode = res.status;
+    this.url = res.url;
+  }
+}
+
 interface FetchRetryOpt {
   minTimeout?: number,
   retries?: number,
@@ -32,7 +53,7 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
       minTimeout: MIN_TIMEOUT,
       retries: MAX_RETRIES,
       factor: FACTOR,
-      maxRetryAfter: MAX_RETRY_AFTER,
+      maxRetryAfter: MAX_RETRY_AFTER
     },
     opts.retry
   );
@@ -41,19 +62,18 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
     return await retry(async (bail) => {
       try {
         // this will be retried
-        const res = await $fetch(url, opts);
+        const res = (await $fetch(url, opts)) as Response;

         if ((res.status >= 500 && res.status < 600) || res.status === 429) {
           // NOTE: doesn't support http-date format
           const retryAfterHeader = res.headers.get('retry-after');
           if (retryAfterHeader) {
-            const retryAfter = parseInt(retryAfterHeader, 10);
+            const retryAfter = Number.parseInt(retryAfterHeader, 10);
             if (retryAfter) {
               if (retryAfter > retryOpts.maxRetryAfter) {
                 return res;
-              } else {
-                await new Promise((r) => setTimeout(r, retryAfter * 1e3));
               }
+              await Bun.sleep(retryAfter * 1e3);
             }
           }
           throw new ResponseError(res);
@@ -78,7 +98,7 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
        }
        throw err;
      }
-  }
+  };

  for (const k of Object.keys($fetch)) {
    const key = k as keyof typeof $fetch;
@@ -88,30 +108,10 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
   return fetchRetry as typeof fetch;
 }

-export class ResponseError extends Error {
-  readonly res: Response;
-  readonly code: number;
-  readonly statusCode: number;
-  readonly url: string;
-
-  constructor(res: Response) {
-    super(res.statusText);
-
-    if (Error.captureStackTrace) {
-      Error.captureStackTrace(this, ResponseError);
-    }
-
-    this.name = this.constructor.name;
-    this.res = res;
-    this.code = this.statusCode = res.status;
-    this.url = res.url;
-  }
-}
-
 export const defaultRequestInit: RequestInit = {
   headers: {
     'User-Agent': 'curl/8.1.2 (https://github.com/SukkaW/Surge)'
   }
-}
+};

 export const fetchWithRetry = createFetchRetry(fetch);
@@ -7,7 +7,7 @@ import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
 const publicSuffixPath = path.resolve(import.meta.dir, '../../node_modules/.cache/public_suffix_list_dat.txt');

 const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => {
-  const customFetch = async (url: string | URL) => Bun.file(url);
+  const customFetch = (url: string | URL) => Promise.resolve(Bun.file(url));

   const publicSuffixFile = Bun.file(publicSuffixPath);
@@ -1,8 +1,8 @@
 import { fetchRemoteTextAndCreateReadlineInterface } from './fetch-remote-text-by-line';
-import tldts from 'tldts';
+import { parse } from 'tldts';

 const isDomainLoose = (domain: string): boolean => {
-  const { isIcann, isPrivate, isIp } = tldts.parse(domain);
+  const { isIcann, isPrivate, isIp } = parse(domain);
   return !!(!isIp && (isIcann || isPrivate));
 };
@@ -8,7 +8,7 @@ import { performance } from 'perf_hooks';
 import { getGorhillPublicSuffixPromise } from './get-gorhill-publicsuffix';
 import type { PublicSuffixList } from 'gorhill-publicsuffixlist';

-const DEBUG_DOMAIN_TO_FIND = null; // example.com | null
+const DEBUG_DOMAIN_TO_FIND: string | null = null; // example.com | null
 let foundDebugDomain = false;

 const warnOnceUrl = new Set<string>();
@@ -63,7 +63,7 @@ export async function processDomainLists(domainListsUrl: string | URL) {
 }

 export async function processHosts(hostsUrl: string | URL, includeAllSubDomain = false, skipDomainCheck = false) {
-  console.time(`- processHosts: ${hostsUrl}`);
+  console.time(`- processHosts: ${hostsUrl.toString()}`);

   if (typeof hostsUrl === 'string') {
     hostsUrl = new URL(hostsUrl);
@@ -95,14 +95,14 @@ export async function processHosts(hostsUrl: string | URL, includeAllSubDomain =
     }
   }

-  console.timeEnd(` - processHosts: ${hostsUrl}`);
+  console.timeEnd(` - processHosts: ${hostsUrl.toString()}`);

   return domainSets;
 }

 export async function processFilterRules(
   filterRulesUrl: string | URL,
-  fallbackUrls?: readonly (string | URL)[] | undefined
+  fallbackUrls?: ReadonlyArray<string | URL> | undefined
 ): Promise<{ white: Set<string>, black: Set<string>, foundDebugDomain: boolean }> {
   const runStart = performance.now();
@@ -167,7 +167,7 @@ export async function processFilterRules(
         addToBlackList(hostname, true);
         break;
       default:
-        throw new Error(`Unknown flag: ${flag}`);
+        throw new Error(`Unknown flag: ${flag as any}`);
     }
   }
 };
@@ -192,7 +192,7 @@ export async function processFilterRules(
     /** @type string[] */
     filterRules = (
       await Promise.any(
-        [filterRulesUrl, ...(fallbackUrls || [])].map(async url => {
+        [filterRulesUrl, ...fallbackUrls].map(async url => {
           const r = await fetchWithRetry(url, { signal: controller.signal, ...defaultRequestInit });
           const text = await r.text();
@@ -202,7 +202,7 @@ export async function processFilterRules(
       )
     ).split('\n');
   } catch (e) {
-    console.log(`Download Rule for [${filterRulesUrl}] failed`);
+    console.log(`Download Rule for [${filterRulesUrl.toString()}] failed`);
     throw e;
   }
   downloadTime = performance.now() - downloadStart;
@@ -212,7 +212,7 @@ export async function processFilterRules(
     }
   }

-  console.log(` ┬ processFilterRules (${filterRulesUrl}): ${(performance.now() - runStart).toFixed(3)}ms`);
+  console.log(` ┬ processFilterRules (${filterRulesUrl.toString()}): ${(performance.now() - runStart).toFixed(3)}ms`);
   console.log(` └── download time: ${downloadTime.toFixed(3)}ms`);

   return {
@@ -177,7 +177,7 @@ export const PREDEFINED_WHITELIST = [
   // https://raw.githubusercontent.com/AdguardTeam/cname-trackers/master/data/combined_disguised_trackers.txt
   'vlscppe.microsoft.com',
   // OpenAI use this for A/B testing
-  'statsig.com',
+  'statsig.com'
 ];

 export const PREDEFINED_ENFORCED_BACKLIST = [
@@ -1,4 +1,5 @@
 import domainSorter from './stable-sort-domain';
+// eslint-disable-next-line import/no-unresolved -- fuck eslint-import
 import { describe, it, expect } from 'bun:test';

 describe('stable-sort-domain', () => {
@@ -50,6 +50,7 @@ const createDomainSorter = (gorhill: PublicSuffixList | null = null) => {
     };
   }

+  // eslint-disable-next-line @typescript-eslint/no-var-requires -- fuck
   const tldts = require('./cached-tld-parse');

   return (a: string, b: string) => {
@@ -22,7 +22,7 @@ export class PolyfillTextDecoderStream extends TransformStream<Uint8Array, strin

   constructor(
     encoding: Encoding = 'utf-8',
-    { fatal = false, ignoreBOM = false }: ConstructorParameters<typeof TextDecoder>[1] = {},
+    { fatal = false, ignoreBOM = false }: ConstructorParameters<typeof TextDecoder>[1] = {}
   ) {
     const decoder = new TextDecoder(encoding, { fatal, ignoreBOM });
     super({
@@ -4,7 +4,7 @@

 interface TextLineStreamOptions {
   /** Allow splitting by solo \r */
-  allowCR: boolean;
+  allowCR: boolean
 }

 /** Transform a stream into a stream where each chunk is divided by a newline,
@@ -36,8 +36,8 @@ export class TextLineStream extends TransformStream<string, string> {
         const crIndex = chunk.indexOf('\r');

         if (
-          crIndex !== -1 && crIndex !== (chunk.length - 1) &&
-          (lfIndex === -1 || (lfIndex - 1) > crIndex)
+          crIndex !== -1 && crIndex !== (chunk.length - 1)
+          && (lfIndex === -1 || (lfIndex - 1) > crIndex)
         ) {
           controller.enqueue(chunk.slice(0, crIndex));
           chunk = chunk.slice(crIndex + 1);
@@ -62,13 +62,14 @@ export class TextLineStream extends TransformStream<string, string> {
       },
       flush(controller) {
         if (__buf.length > 0) {
+          // eslint-disable-next-line sukka-ts/string/prefer-string-starts-ends-with -- performance
           if (allowCR && __buf[__buf.length - 1] === '\r') {
             controller.enqueue(__buf.slice(0, -1));
           } else {
             controller.enqueue(__buf);
-          };
           }
-        },
+        }
+      }
     });
   }
 }
@@ -19,9 +19,9 @@ const traceAsync = async <T>(prefix: string, fn: () => Promise<T>): Promise<T> =
 export { traceAsync };

 export interface TaskResult {
-  readonly start: number;
-  readonly end: number;
-  readonly taskName: string;
+  readonly start: number,
+  readonly end: number,
+  readonly taskName: string
 }

 const task = <T>(importMetaPath: string, fn: () => Promise<T>, customname: string | null = null) => {
@@ -1,4 +1,5 @@
-import createTrie from './trie';
+import { createTrie } from './trie';
+// eslint-disable-next-line import/no-unresolved -- fuck eslint-import
 import { describe, expect, it } from 'bun:test';

 describe('Trie', () => {
@@ -8,7 +8,7 @@ export const SENTINEL: string = String.fromCodePoint(0);
  * @param {string[] | Set<string>} [from]
  */
 export const createTrie = (from?: string[] | Set<string>) => {
-  let size: number = 0;
+  let size = 0;
   const root: any = {};

   /**
@@ -56,7 +56,7 @@ export const createTrie = (from?: string[] | Set<string>) => {
    * @param {boolean} [includeEqualWithSuffix]
    * @return {string[]}
    */
-  const find = (suffix: string, includeEqualWithSuffix: boolean = true): string[] => {
+  const find = (suffix: string, includeEqualWithSuffix = true): string[] => {
     let node: any = root;
     const matches: string[] = [];
     let token: string;
Build/mod.d.ts (vendored): 30 lines changed
@@ -2,33 +2,33 @@ declare module 'gorhill-publicsuffixlist' {
   type Selfie =
     | string
     | {
-      magic: number;
-      buf32: number[];
+      magic: number,
+      buf32: number[]
     };
   interface Decoder {
-    decode: (bufferStr: string, buffer: ArrayBuffer) => void;
-    decodeSize: (bufferStr: string) => number;
+    decode: (bufferStr: string, buffer: ArrayBuffer) => void,
+    decodeSize: (bufferStr: string) => number
   }
   interface Encoder {
-    encode: (buffer: ArrayBuffer, length: number) => string;
+    encode: (buffer: ArrayBuffer, length: number) => string
   }
   export interface PublicSuffixList {
-    version: string;
+    version: string,

-    parse(text: string, toAscii: (input: string) => string): void;
+    parse(text: string, toAscii: (input: string) => string): void,

-    getPublicSuffix(hostname: string): string;
-    getDomain(hostname: string): string;
+    getPublicSuffix(hostname: string): string,
+    getDomain(hostname: string): string,

-    suffixInPSL(hostname: string): boolean;
+    suffixInPSL(hostname: string): boolean,

-    toSelfie(encoder?: null | Encoder): Selfie;
-    fromSelfie(selfie: Selfie, decoder?: null | Decoder): boolean;
+    toSelfie(encoder?: null | Encoder): Selfie,
+    fromSelfie(selfie: Selfie, decoder?: null | Decoder): boolean,

     enableWASM(options?: {
-      customFetch?: null | ((url: URL) => Promise<Blob>);
-    }): Promise<boolean>;
-    disableWASM(): Promise<boolean>;
+      customFetch?: null | ((url: URL) => Promise<Blob>)
+    }): Promise<boolean>,
+    disableWASM(): Promise<boolean>
   }

   const psl: PublicSuffixList;
@@ -9,7 +9,7 @@ const handleMessage = async (e: MessageEvent<'build' | 'exit'>) => {
   if (e.data === 'build') {
     const stat = await promise;
     postMessage(stat);
-  } else if (e.data === 'exit') {
+  } else /* if (e.data === 'exit') */ {
     self.removeEventListener('message', handleMessage);
     self.unref();
     self.terminate();
@@ -5,7 +5,7 @@ module.exports = require('eslint-config-sukka').sukka({
   disableNoConsoleInCLI: ['Build/**'],
   env: {
     customGlobals: {
-      'Bun': 'readonly'
+      Bun: 'readonly'
     }
   }
 },
@@ -31,14 +31,14 @@
     "tldts": "^6.0.22"
   },
   "devDependencies": {
-    "@eslint-sukka/node": "4.1.9",
-    "@eslint-sukka/ts": "4.1.9",
+    "@eslint-sukka/node": "4.1.10-beta.2",
+    "@eslint-sukka/ts": "4.1.10-beta.2",
     "@types/async-retry": "^1.4.8",
     "@types/mocha": "10.0.2",
     "@types/tar": "^6.1.9",
     "bun-types": "^1.0.11",
     "chai": "4.3.10",
-    "eslint-config-sukka": "4.1.9",
+    "eslint-config-sukka": "4.1.10-beta.2",
     "eslint-formatter-sukka": "4.1.9",
     "mocha": "^10.2.0",
     "typescript": "^5.2.2"
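For readers skimming the diff, the recurring fixes above are purely stylistic: `Array<T>`/`Set<T>` generic annotations instead of `T[]` and untyped constructors, named exports instead of default exports, `Number.parseInt` instead of the global `parseInt`, template literals instead of string concatenation, and consistent member delimiters in type declarations. A minimal self-contained sketch of that style is shown below; every identifier in it is hypothetical and only the conventions mirror the commit:

// Hypothetical example; none of these names come from the repository.
export const summarizeTokens = (input: string): Array<string | null> => {
  const seen = new Set<string>();            // type argument on the constructor, not the variable
  const results: Array<string | null> = [];  // Array<T> rather than (string | null)[]

  for (const token of input.split(',')) {
    if (seen.has(token)) continue;
    seen.add(token);
    const parsed = Number.parseInt(token, 10);      // Number.parseInt rather than global parseInt
    results.push(Number.isNaN(parsed) ? null : `token ${token} = ${parsed}`); // template literal
  }

  return results;
};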