diff --git a/Build/build-cdn-conf.ts b/Build/build-cdn-conf.ts
index d042cd26..a30864e1 100644
--- a/Build/build-cdn-conf.ts
+++ b/Build/build-cdn-conf.ts
@@ -56,7 +56,7 @@ const getS3OSSDomains = async (): Promise> => {
   return S3OSSDomains;
 };
 
-const buildCdnConf = task(import.meta.path, async () => {
+const buildCdnConf = task(import.meta.path, async () => {
   /** @type {string[]} */
   const cdnDomainsList: string[] = [];
 
diff --git a/Build/build-common.ts b/Build/build-common.ts
index 2188bead..b16571d4 100644
--- a/Build/build-common.ts
+++ b/Build/build-common.ts
@@ -18,7 +18,7 @@ const outputSurgeDir = path.resolve(import.meta.dir, '../List');
 const outputClashDir = path.resolve(import.meta.dir, '../Clash');
 
 export const buildCommon = task(import.meta.path, async () => {
-  const promises: Promise[] = [];
+  const promises: Array> = [];
 
   const pw = new PathScurry(sourceDir);
   for await (const entry of pw) {
@@ -57,23 +57,22 @@ const processFile = async (sourcePath: string) => {
 
   let title = '';
   const descriptions: string[] = [];
 
-  try {
   for await (const line of readFileByLine(sourcePath)) {
     if (line === MAGIC_COMMAND_SKIP) {
       return;
     }
-
+
     if (line.startsWith(MAGIC_COMMAND_TITLE)) {
       title = line.slice(MAGIC_COMMAND_TITLE.length).trim();
       continue;
     }
-
+
     if (line.startsWith(MAGIC_COMMAND_DESCRIPTION)) {
       descriptions.push(line.slice(MAGIC_COMMAND_DESCRIPTION.length).trim());
       continue;
     }
-
+
     const l = processLine(line);
     if (l) {
       lines.push(l);
diff --git a/Build/build-domestic-ruleset.ts b/Build/build-domestic-ruleset.ts
index 422e74d0..9dd25f0f 100644
--- a/Build/build-domestic-ruleset.ts
+++ b/Build/build-domestic-ruleset.ts
@@ -11,15 +11,10 @@ export const buildDomesticRuleset = task(import.meta.path, async () => {
   const results = await processLineFromReadline(readFileByLine(path.resolve(import.meta.dir, '../Source/non_ip/domestic.conf')));
 
   results.push(
-    ...Object.entries(DOMESTICS)
-      .reduce(
-        (acc, [key, { domains }]) => {
-          if (key === 'SYSTEM') return acc;
-          return [...acc, ...domains];
-        },
-        []
-      )
-      .map((domain) => `DOMAIN-SUFFIX,${domain}`)
+    ...Object.entries(DOMESTICS).reduce((acc, [key, { domains }]) => {
+      if (key === 'SYSTEM') return acc;
+      return [...acc, ...domains];
+    }, []).map((domain) => `DOMAIN-SUFFIX,${domain}`)
   );
 
   const rulesetDescription = [
diff --git a/Build/build-internal-cdn-rules.ts b/Build/build-internal-cdn-rules.ts
index 7db68873..720f3350 100644
--- a/Build/build-internal-cdn-rules.ts
+++ b/Build/build-internal-cdn-rules.ts
@@ -1,5 +1,4 @@
-// @ts-check
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import path from 'path';
 import * as tldts from 'tldts';
 import { processLine } from './lib/process-line';
@@ -14,7 +13,7 @@ const escapeRegExp = (string = '') => string.replaceAll(/[$()*+.?[\\\]^{|}]/g, '
 
 export const buildInternalCDNDomains = task(import.meta.path, async () => {
   const set = new Set();
-  const keywords = new Set();
+  const keywords = new Set();
 
   const gorhill = await getGorhillPublicSuffixPromise();
   const domainSorter = createDomainSorter(gorhill);
diff --git a/Build/build-internal-chn-domains.ts b/Build/build-internal-chn-domains.ts
index 3328b523..821c1bd6 100644
--- a/Build/build-internal-chn-domains.ts
+++ b/Build/build-internal-chn-domains.ts
@@ -1,5 +1,5 @@
 import path from 'path';
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
 import { task } from './lib/trace-runner';
 import { compareAndWriteFile } from './lib/create-file';
diff --git a/Build/build-internal-reverse-chn-cidr.ts b/Build/build-internal-reverse-chn-cidr.ts
index 19177c78..8bf6b801 100644
--- a/Build/build-internal-reverse-chn-cidr.ts
+++ b/Build/build-internal-reverse-chn-cidr.ts
@@ -1,7 +1,7 @@
 import { fetchRemoteTextAndCreateReadlineInterface } from './lib/fetch-remote-text-by-line';
 import { processLineFromReadline } from './lib/process-line';
 import path from 'path';
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import { task } from './lib/trace-runner';
 
 const RESERVED_IPV4_CIDR = [
diff --git a/Build/build-mitm-hostname.js b/Build/build-mitm-hostname.js
index eac384b7..e1563736 100644
--- a/Build/build-mitm-hostname.js
+++ b/Build/build-mitm-hostname.js
@@ -1,7 +1,7 @@
 const fsPromises = require('fs').promises;
 const pathFn = require('path');
 const table = require('table');
-import listDir from '@sukka/listdir';
+const listDir = require('@sukka/listdir');
 const { green, yellow } = require('picocolors');
 
 const PRESET_MITM_HOSTNAMES = [
diff --git a/Build/build-phishing-domainset.ts b/Build/build-phishing-domainset.ts
index d8e08f6e..d34ba684 100644
--- a/Build/build-phishing-domainset.ts
+++ b/Build/build-phishing-domainset.ts
@@ -1,10 +1,10 @@
-import { processFilterRules, processHosts } from './lib/parse-filter';
+import { processHosts } from './lib/parse-filter';
 import path from 'path';
 import { createRuleset } from './lib/create-file';
 import { processLine } from './lib/process-line';
 import { createDomainSorter } from './lib/stable-sort-domain';
 import { traceSync, task } from './lib/trace-runner';
-import createTrie from './lib/trie';
+import { createTrie } from './lib/trie';
 import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
 import { createCachedGorhillGetDomain } from './lib/cached-tld-parse';
 import * as tldts from 'tldts';
@@ -156,11 +156,11 @@ export const buildPhishingDomainSet = task(import.meta.path, async () => {
 
   const results = traceSync('* get final results', () => Object.entries(domainCountMap)
     .reduce((acc, [apexDomain, count]) => {
-    if (count >= 5) {
-      acc.push(`.${apexDomain}`);
-    }
-    return acc;
-  }, [])
+      if (count >= 5) {
+        acc.push(`.${apexDomain}`);
+      }
+      return acc;
+    }, [])
     .sort(domainSorter));
 
   const description = [
diff --git a/Build/build-public.ts b/Build/build-public.ts
index 91f27a7b..143fd877 100644
--- a/Build/build-public.ts
+++ b/Build/build-public.ts
@@ -1,6 +1,6 @@
 import listDir from '@sukka/listdir';
 import path from 'path';
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import { task } from './lib/trace-runner';
 
 const rootPath = path.resolve(import.meta.dir, '../');
diff --git a/Build/build-reject-domainset.ts b/Build/build-reject-domainset.ts
index 5477999a..983aec99 100644
--- a/Build/build-reject-domainset.ts
+++ b/Build/build-reject-domainset.ts
@@ -1,9 +1,9 @@
 // @ts-check
-import fsp from 'fs/promises'
+import fsp from 'fs/promises';
 import path from 'path';
 
 import { processHosts, processFilterRules } from './lib/parse-filter';
-import createTrie from './lib/trie';
+import { createTrie } from './lib/trie';
 import { HOSTS, ADGUARD_FILTERS, PREDEFINED_WHITELIST, PREDEFINED_ENFORCED_BACKLIST } from './lib/reject-data-source';
 import { createRuleset, compareAndWriteFile } from './lib/create-file';
@@ -20,13 +20,13 @@ import { SHARED_DESCRIPTION } from './lib/constants';
 /** Whitelists */
 const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
 /** @type {Set} Dedupe domains inclued by DOMAIN-KEYWORD */
-const domainKeywordsSet: Set = new Set();
+const domainKeywordsSet = new Set();
 /** @type {Set} Dedupe domains included by DOMAIN-SUFFIX */
-const domainSuffixSet: Set = new Set();
+const domainSuffixSet = new Set();
 
 export const buildRejectDomainSet = task(import.meta.path, async () => {
   /** @type Set */
-  const domainSets: Set = new Set();
+  const domainSets = new Set();
 
   // Parse from AdGuard Filters
   console.time('* Download and process Hosts / AdBlock Filter Rules');
@@ -91,7 +91,6 @@ export const buildRejectDomainSet = task(import.meta.path, async () => {
   console.timeEnd('* Download and process Hosts / AdBlock Filter Rules');
 
   if (shouldStop) {
-    // eslint-disable-next-line n/no-process-exit -- force stop
     process.exit(1);
   }
 
@@ -173,7 +172,7 @@ export const buildRejectDomainSet = task(import.meta.path, async () => {
   console.log(`Deduped ${previousSize - dudupedDominArray.length} rules!`);
 
   // Create reject stats
-  const rejectDomainsStats: [string, number][] = traceSync(
+  const rejectDomainsStats: Array<[string, number]> = traceSync(
     '* Collect reject domain stats',
     () => Object.entries(
       dudupedDominArray.reduce>((acc, cur) => {
diff --git a/Build/build-speedtest-domainset.ts b/Build/build-speedtest-domainset.ts
index fa0c604a..96bd2b8f 100644
--- a/Build/build-speedtest-domainset.ts
+++ b/Build/build-speedtest-domainset.ts
@@ -14,7 +14,7 @@ const s = new Sema(3);
 const latestTopUserAgentsPromise = fetchWithRetry('https://unpkg.com/top-user-agents@latest/index.json')
   .then(res => res.json() as Promise);
 
-const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> => {
+const querySpeedtestApi = async (keyword: string): Promise> => {
   const [topUserAgents] = await Promise.all([
     latestTopUserAgentsPromise,
     s.acquire()
@@ -42,10 +42,10 @@ const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> =>
       }
     });
     if (!res.ok) {
-      throw new Error(res.statusText + '\n' + await res.text());
+      throw new Error(`${res.statusText}\n${await res.text()}`);
     }
 
-    const json = await res.json() as { url: string; }[];
+    const json = await res.json() as Array<{ url: string }>;
     s.release();
     console.timeEnd(key);
 
@@ -60,7 +60,7 @@ const querySpeedtestApi = async (keyword: string): Promise<(string | null)[]> =>
 
 export const buildSpeedtestDomainSet = task(import.meta.path, async () => {
   /** @type {Set} */
-  const domains: Set = new Set([
+  const domains = new Set([
     '.speedtest.net',
     '.speedtestcustom.com',
     '.ooklaserver.net',
diff --git a/Build/build-stream-service.ts b/Build/build-stream-service.ts
index 2d67d447..4810acbd 100644
--- a/Build/build-stream-service.ts
+++ b/Build/build-stream-service.ts
@@ -7,7 +7,7 @@ import { createRuleset } from './lib/create-file';
 import { ALL, NORTH_AMERICA, EU, HK, TW, JP, KR } from '../Source/stream';
 import { SHARED_DESCRIPTION } from './lib/constants';
 
-const createRulesetForStreamService = (fileId: string, title: string, streamServices: import('../Source/stream').StreamService[]) => {
+const createRulesetForStreamService = (fileId: string, title: string, streamServices: Array) => {
   return [
     // Domains
     ...createRuleset(
@@ -15,10 +15,10 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
       [
         ...SHARED_DESCRIPTION,
         '',
-        ...streamServices.map((i: { name: any; }) => `- ${i.name}`)
+        ...streamServices.map((i) => `- ${i.name}`)
       ],
       new Date(),
-      streamServices.flatMap((i: { rules: any; }) => i.rules),
+      streamServices.flatMap((i) => i.rules),
       'ruleset',
       path.resolve(import.meta.dir, `../List/non_ip/${fileId}.conf`),
       path.resolve(import.meta.dir, `../Clash/non_ip/${fileId}.txt`)
@@ -29,14 +29,14 @@ const createRulesetForStreamService = (fileId: string, title: string, streamServ
       [
         ...SHARED_DESCRIPTION,
         '',
-        ...streamServices.map((i: { name: any; }) => `- ${i.name}`)
+        ...streamServices.map((i) => `- ${i.name}`)
       ],
       new Date(),
       streamServices.flatMap((i) => (
         i.ip
           ? [
-            ...i.ip.v4.map((ip: any) => `IP-CIDR,${ip},no-resolve`),
-            ...i.ip.v6.map((ip: any) => `IP-CIDR6,${ip},no-resolve`)
+            ...i.ip.v4.map((ip) => `IP-CIDR,${ip},no-resolve`),
+            ...i.ip.v6.map((ip) => `IP-CIDR6,${ip},no-resolve`)
           ]
           : []
       )),
diff --git a/Build/download-previous-build.ts b/Build/download-previous-build.ts
index d8371c01..ef1b3ec0 100644
--- a/Build/download-previous-build.ts
+++ b/Build/download-previous-build.ts
@@ -71,7 +71,7 @@ export const downloadPreviousBuild = task(import.meta.path, async () => {
         return;
       }
 
-      const relativeEntryPath = entry.path.replace('ruleset.skk.moe-master' + path.sep, '');
+      const relativeEntryPath = entry.path.replace(`ruleset.skk.moe-master${path.sep}`, '');
       const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);
 
       await fsp.mkdir(path.dirname(targetPath), { recursive: true });
@@ -105,7 +105,7 @@ export const downloadPublicSuffixList = task(import.meta.path, async () => {
     fsp.mkdir(publicSuffixDir, { recursive: true })
   ]);
 
-  return Bun.write(publicSuffixPath, resp);
+  return Bun.write(publicSuffixPath, resp as Response);
 }, 'download-publicsuffixlist');
 
 if (import.meta.main) {
diff --git a/Build/index.ts b/Build/index.ts
index 2ad96bbd..aaa46283 100644
--- a/Build/index.ts
+++ b/Build/index.ts
@@ -98,7 +98,7 @@ import { buildPublicHtml } from './build-public';
     printStats(stats);
   } catch (e) {
     console.error(e);
-    console.error('Something went wrong!')
+    console.error('Something went wrong!');
   }
 })();
 
diff --git a/Build/lib/aho-corasick.ts b/Build/lib/aho-corasick.ts
index 585cb3fb..da07ff42 100644
--- a/Build/lib/aho-corasick.ts
+++ b/Build/lib/aho-corasick.ts
@@ -1,12 +1,12 @@
 interface Node {
   /** @default 0 */
-  depth?: number;
-  key: string;
+  depth?: number,
+  key: string,
   /** @default false */
-  word?: boolean;
-  children: Record;
-  fail?: Node;
-  count: number;
+  word?: boolean,
+  children: Record,
+  fail?: Node,
+  count: number
 }
 
 const createNode = (key: string, depth = 0): Node => ({
@@ -31,15 +31,15 @@ const createKeywordFilter = (keys: string[] | Set) => {
     const map = beginNode.children;
     // eslint-disable-next-line guard-for-in -- plain object
     for (const key in beginNode.children) {
-      const node = map?.[key];
+      const node = map[key];
 
       let failNode = beginNode.fail;
 
-      while (failNode && !failNode.children?.[key]) {
+      while (failNode && !failNode.children[key]) {
        failNode = failNode.fail;
       }
 
       if (node) {
-        node.fail = failNode?.children?.[key] || root;
+        node.fail = failNode?.children[key] || root;
         queue.push(node);
       }
@@ -86,8 +86,8 @@ const createKeywordFilter = (keys: string[] | Set) => {
     // const key = text.charAt(i);
     const key = text[i];
 
-    while (node && !node?.children[key]) {
-      node = node?.fail;
+    while (node && !node.children[key]) {
+      node = node.fail;
     }
 
     node = node?.children[key] || root;
diff --git a/Build/lib/cached-tld-parse.ts b/Build/lib/cached-tld-parse.ts
index 01d75ffd..356b2888 100644
--- a/Build/lib/cached-tld-parse.ts
+++ b/Build/lib/cached-tld-parse.ts
@@ -1,6 +1,6 @@
-import tldts from 'tldts';
+import * as tldts from 'tldts';
 import { createCache } from './cache-apply';
-import { PublicSuffixList } from 'gorhill-publicsuffixlist';
+import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
 
 const cache = createCache('cached-tld-parse', true);
@@ -12,6 +12,6 @@ let gothillGetDomainCache: ReturnType | null = null;
 export const createCachedGorhillGetDomain = (gorhill: PublicSuffixList) => {
   return (domain: string) => {
     gothillGetDomainCache ??= createCache('cached-gorhill-get-domain', true);
-    return gothillGetDomainCache.sync(domain, () => gorhill.getDomain(domain[0] === '.' ? domain.slice(1) : domain))
+    return gothillGetDomainCache.sync(domain, () => gorhill.getDomain(domain[0] === '.' ? domain.slice(1) : domain));
   };
 };
diff --git a/Build/lib/constants.ts b/Build/lib/constants.ts
index a6566f2b..503c74e1 100644
--- a/Build/lib/constants.ts
+++ b/Build/lib/constants.ts
@@ -1,5 +1,5 @@
 export const SHARED_DESCRIPTION = [
   'License: AGPL 3.0',
   'Homepage: https://ruleset.skk.moe',
-  'GitHub: https://github.com/SukkaW/Surge',
+  'GitHub: https://github.com/SukkaW/Surge'
 ] as const;
diff --git a/Build/lib/create-file.ts b/Build/lib/create-file.ts
index d0c09cd6..c6360216 100644
--- a/Build/lib/create-file.ts
+++ b/Build/lib/create-file.ts
@@ -85,7 +85,7 @@ export const createRuleset = (
       _clashContent = surgeRulesetToClashClassicalTextRuleset(content);
       break;
     default:
-      throw new TypeError(`Unknown type: ${type}`);
+      throw new TypeError(`Unknown type: ${type as any}`);
   }
 
   const clashContent = withBannerArray(title, description, date, _clashContent);
diff --git a/Build/lib/domain-deduper.ts b/Build/lib/domain-deduper.ts
index 875853f5..e832273e 100644
--- a/Build/lib/domain-deduper.ts
+++ b/Build/lib/domain-deduper.ts
@@ -1,4 +1,4 @@
-import createTrie from './trie';
+import { createTrie } from './trie';
 
 export const domainDeduper = (inputDomains: string[]): string[] => {
   const trie = createTrie(inputDomains);
diff --git a/Build/lib/fetch-retry.ts b/Build/lib/fetch-retry.ts
index 7a6dc0d4..8cb66166 100644
--- a/Build/lib/fetch-retry.ts
+++ b/Build/lib/fetch-retry.ts
@@ -9,11 +9,32 @@ const FACTOR = 6;
 function isClientError(err: any): err is NodeJS.ErrnoException {
   if (!err) return false;
   return (
-    err.code === 'ERR_UNESCAPED_CHARACTERS' ||
-    err.message === 'Request path contains unescaped characters'
+    err.code === 'ERR_UNESCAPED_CHARACTERS'
+    || err.message === 'Request path contains unescaped characters'
   );
 }
 
+export class ResponseError extends Error {
+  readonly res: Response;
+  readonly code: number;
+  readonly statusCode: number;
+  readonly url: string;
+
+  constructor(res: Response) {
+    super(res.statusText);
+
+    if ('captureStackTrace' in Error) {
+      Error.captureStackTrace(this, ResponseError);
+    }
+
+    this.name = this.constructor.name;
+    this.res = res;
+    this.code = res.status;
+    this.statusCode = res.status;
+    this.url = res.url;
+  }
+}
+
 interface FetchRetryOpt {
   minTimeout?: number,
   retries?: number,
@@ -32,7 +53,7 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
         minTimeout: MIN_TIMEOUT,
         retries: MAX_RETRIES,
         factor: FACTOR,
-        maxRetryAfter: MAX_RETRY_AFTER,
+        maxRetryAfter: MAX_RETRY_AFTER
       },
       opts.retry
     );
@@ -41,19 +62,18 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
 
     return await retry(async (bail) => {
       try {
         // this will be retried
-        const res = await $fetch(url, opts);
+        const res = (await $fetch(url, opts)) as Response;
         if ((res.status >= 500 && res.status < 600) || res.status === 429) {
           // NOTE: doesn't support http-date format
           const retryAfterHeader = res.headers.get('retry-after');
           if (retryAfterHeader) {
-            const retryAfter = parseInt(retryAfterHeader, 10);
+            const retryAfter = Number.parseInt(retryAfterHeader, 10);
             if (retryAfter) {
               if (retryAfter > retryOpts.maxRetryAfter) {
                 return res;
-              } else {
-                await new Promise((r) => setTimeout(r, retryAfter * 1e3));
               }
+              await Bun.sleep(retryAfter * 1e3);
             }
           }
           throw new ResponseError(res);
@@ -78,7 +98,7 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
       }
       throw err;
     }
-  }
+  };
 
   for (const k of Object.keys($fetch)) {
     const key = k as keyof typeof $fetch;
@@ -88,30 +108,10 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
   return fetchRetry as typeof fetch;
 }
 
-export class ResponseError extends Error {
-  readonly res: Response;
-  readonly code: number;
-  readonly statusCode: number;
-  readonly url: string;
-
-  constructor(res: Response) {
-    super(res.statusText);
-
-    if (Error.captureStackTrace) {
-      Error.captureStackTrace(this, ResponseError);
-    }
-
-    this.name = this.constructor.name;
-    this.res = res;
-    this.code = this.statusCode = res.status;
-    this.url = res.url;
-  }
-}
-
 export const defaultRequestInit: RequestInit = {
   headers: {
     'User-Agent': 'curl/8.1.2 (https://github.com/SukkaW/Surge)'
   }
-}
+};
 
 export const fetchWithRetry = createFetchRetry(fetch);
diff --git a/Build/lib/get-gorhill-publicsuffix.ts b/Build/lib/get-gorhill-publicsuffix.ts
index b7a47ab8..0d74af1f 100644
--- a/Build/lib/get-gorhill-publicsuffix.ts
+++ b/Build/lib/get-gorhill-publicsuffix.ts
@@ -7,7 +7,7 @@ import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
 const publicSuffixPath = path.resolve(import.meta.dir, '../../node_modules/.cache/public_suffix_list_dat.txt');
 
 const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => {
-  const customFetch = async (url: string | URL) => Bun.file(url);
+  const customFetch = (url: string | URL) => Promise.resolve(Bun.file(url));
 
   const publicSuffixFile = Bun.file(publicSuffixPath);
diff --git a/Build/lib/parse-dnsmasq.ts b/Build/lib/parse-dnsmasq.ts
index eb4b764c..18d772c6 100644
--- a/Build/lib/parse-dnsmasq.ts
+++ b/Build/lib/parse-dnsmasq.ts
@@ -1,8 +1,8 @@
 import { fetchRemoteTextAndCreateReadlineInterface } from './fetch-remote-text-by-line';
-import tldts from 'tldts';
+import { parse } from 'tldts';
 
 const isDomainLoose = (domain: string): boolean => {
-  const { isIcann, isPrivate, isIp } = tldts.parse(domain);
+  const { isIcann, isPrivate, isIp } = parse(domain);
   return !!(!isIp && (isIcann || isPrivate));
 };
diff --git a/Build/lib/parse-filter.ts b/Build/lib/parse-filter.ts
index 2c1cdb85..6817f827 100644
--- a/Build/lib/parse-filter.ts
+++ b/Build/lib/parse-filter.ts
@@ -8,7 +8,7 @@ import { performance } from 'perf_hooks';
 import { getGorhillPublicSuffixPromise } from './get-gorhill-publicsuffix';
 import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
 
-const DEBUG_DOMAIN_TO_FIND = null; // example.com | null
+const DEBUG_DOMAIN_TO_FIND: string | null = null; // example.com | null
 let foundDebugDomain = false;
 const warnOnceUrl = new Set();
@@ -63,7 +63,7 @@ export async function processDomainLists(domainListsUrl: string | URL) {
 }
 
 export async function processHosts(hostsUrl: string | URL, includeAllSubDomain = false, skipDomainCheck = false) {
-  console.time(`- processHosts: ${hostsUrl}`);
+  console.time(`- processHosts: ${hostsUrl.toString()}`);
   if (typeof hostsUrl === 'string') {
     hostsUrl = new URL(hostsUrl);
   }
@@ -95,14 +95,14 @@ export async function processHosts(hostsUrl: string | URL, includeAllSubDomain =
     }
   }
 
-  console.timeEnd(` - processHosts: ${hostsUrl}`);
+  console.timeEnd(` - processHosts: ${hostsUrl.toString()}`);
 
   return domainSets;
 }
 
 export async function processFilterRules(
   filterRulesUrl: string | URL,
-  fallbackUrls?: readonly (string | URL)[] | undefined
+  fallbackUrls?: ReadonlyArray | undefined
 ): Promise<{ white: Set, black: Set, foundDebugDomain: boolean }> {
   const runStart = performance.now();
@@ -167,7 +167,7 @@ export async function processFilterRules(
         addToBlackList(hostname, true);
         break;
       default:
-        throw new Error(`Unknown flag: ${flag}`);
+        throw new Error(`Unknown flag: ${flag as any}`);
     }
   }
 };
@@ -192,7 +192,7 @@ export async function processFilterRules(
       /** @type string[] */
       filterRules = (
         await Promise.any(
-          [filterRulesUrl, ...(fallbackUrls || [])].map(async url => {
+          [filterRulesUrl, ...fallbackUrls].map(async url => {
            const r = await fetchWithRetry(url, { signal: controller.signal, ...defaultRequestInit });
            const text = await r.text();
@@ -202,7 +202,7 @@ export async function processFilterRules(
         )
       ).split('\n');
     } catch (e) {
-      console.log(`Download Rule for [${filterRulesUrl}] failed`);
+      console.log(`Download Rule for [${filterRulesUrl.toString()}] failed`);
       throw e;
     }
     downloadTime = performance.now() - downloadStart;
@@ -212,7 +212,7 @@ export async function processFilterRules(
     }
   }
 
-  console.log(` ┬ processFilterRules (${filterRulesUrl}): ${(performance.now() - runStart).toFixed(3)}ms`);
+  console.log(` ┬ processFilterRules (${filterRulesUrl.toString()}): ${(performance.now() - runStart).toFixed(3)}ms`);
   console.log(` └── download time: ${downloadTime.toFixed(3)}ms`);
 
   return {
diff --git a/Build/lib/reject-data-source.ts b/Build/lib/reject-data-source.ts
index 23a3f2a7..5a2c7b11 100644
--- a/Build/lib/reject-data-source.ts
+++ b/Build/lib/reject-data-source.ts
@@ -177,7 +177,7 @@ export const PREDEFINED_WHITELIST = [
   // https://raw.githubusercontent.com/AdguardTeam/cname-trackers/master/data/combined_disguised_trackers.txt
   'vlscppe.microsoft.com',
   // OpenAI use this for A/B testing
-  'statsig.com',
+  'statsig.com'
 ];
 
 export const PREDEFINED_ENFORCED_BACKLIST = [
diff --git a/Build/lib/stable-sort-domain.test.ts b/Build/lib/stable-sort-domain.test.ts
index 3647a941..de3a79c7 100644
--- a/Build/lib/stable-sort-domain.test.ts
+++ b/Build/lib/stable-sort-domain.test.ts
@@ -1,4 +1,5 @@
 import domainSorter from './stable-sort-domain';
+// eslint-disable-next-line import/no-unresolved -- fuck eslint-import
 import { describe, it, expect } from 'bun:test';
 
 describe('stable-sort-domain', () => {
diff --git a/Build/lib/stable-sort-domain.ts b/Build/lib/stable-sort-domain.ts
index 354895d2..7aafac29 100644
--- a/Build/lib/stable-sort-domain.ts
+++ b/Build/lib/stable-sort-domain.ts
@@ -50,6 +50,7 @@ const createDomainSorter = (gorhill: PublicSuffixList | null = null) => {
     };
   }
 
+  // eslint-disable-next-line @typescript-eslint/no-var-requires -- fuck
   const tldts = require('./cached-tld-parse');
 
   return (a: string, b: string) => {
diff --git a/Build/lib/text-decoder-stream.ts b/Build/lib/text-decoder-stream.ts
index e9cd5de2..4dc51407 100644
--- a/Build/lib/text-decoder-stream.ts
+++ b/Build/lib/text-decoder-stream.ts
@@ -22,7 +22,7 @@ export class PolyfillTextDecoderStream extends TransformStream
-    { fatal = false, ignoreBOM = false }: ConstructorParameters[1] = {},
+    { fatal = false, ignoreBOM = false }: ConstructorParameters[1] = {}
   ) {
     const decoder = new TextDecoder(encoding, { fatal, ignoreBOM });
     super({
diff --git a/Build/lib/text-line-transform-stream.ts b/Build/lib/text-line-transform-stream.ts
index 26c4a159..9bc2f175 100644
--- a/Build/lib/text-line-transform-stream.ts
+++ b/Build/lib/text-line-transform-stream.ts
@@ -4,7 +4,7 @@ interface TextLineStreamOptions {
   /** Allow splitting by solo \r */
-  allowCR: boolean;
+  allowCR: boolean
 }
 
 /** Transform a stream into a stream where each chunk is divided by a newline,
@@ -36,8 +36,8 @@ export class TextLineStream extends TransformStream {
       const crIndex = chunk.indexOf('\r');
       if (
-        crIndex !== -1 && crIndex !== (chunk.length - 1) &&
-        (lfIndex === -1 || (lfIndex - 1) > crIndex)
+        crIndex !== -1 && crIndex !== (chunk.length - 1)
+        && (lfIndex === -1 || (lfIndex - 1) > crIndex)
       ) {
         controller.enqueue(chunk.slice(0, crIndex));
         chunk = chunk.slice(crIndex + 1);
@@ -62,13 +62,14 @@ export class TextLineStream extends TransformStream {
       },
       flush(controller) {
         if (__buf.length > 0) {
+          // eslint-disable-next-line sukka-ts/string/prefer-string-starts-ends-with -- performance
          if (allowCR && __buf[__buf.length - 1] === '\r') {
            controller.enqueue(__buf.slice(0, -1));
          } else {
            controller.enqueue(__buf);
-          };
+          }
         }
-      },
+      }
     });
   }
 }
diff --git a/Build/lib/trace-runner.ts b/Build/lib/trace-runner.ts
index bb74c478..b2bf732f 100644
--- a/Build/lib/trace-runner.ts
+++ b/Build/lib/trace-runner.ts
@@ -19,9 +19,9 @@ const traceAsync = async (prefix: string, fn: () => Promise): Promise =
 export { traceAsync };
 
 export interface TaskResult {
-  readonly start: number;
-  readonly end: number;
-  readonly taskName: string;
+  readonly start: number,
+  readonly end: number,
+  readonly taskName: string
 }
 
 const task = (importMetaPath: string, fn: () => Promise, customname: string | null = null) => {
diff --git a/Build/lib/trie.test.ts b/Build/lib/trie.test.ts
index afaa3a34..c942453f 100644
--- a/Build/lib/trie.test.ts
+++ b/Build/lib/trie.test.ts
@@ -1,4 +1,5 @@
-import createTrie from './trie';
+import { createTrie } from './trie';
+// eslint-disable-next-line import/no-unresolved -- fuck eslint-import
 import { describe, expect, it } from 'bun:test';
 
 describe('Trie', () => {
diff --git a/Build/lib/trie.ts b/Build/lib/trie.ts
index 0abcbee5..e553840c 100644
--- a/Build/lib/trie.ts
+++ b/Build/lib/trie.ts
@@ -8,7 +8,7 @@ export const SENTINEL: string = String.fromCodePoint(0);
  * @param {string[] | Set} [from]
  */
 export const createTrie = (from?: string[] | Set) => {
-  let size: number = 0;
+  let size = 0;
   const root: any = {};
 
   /**
@@ -56,7 +56,7 @@ export const createTrie = (from?: string[] | Set) => {
    * @param {boolean} [includeEqualWithSuffix]
    * @return {string[]}
    */
-  const find = (suffix: string, includeEqualWithSuffix: boolean = true): string[] => {
+  const find = (suffix: string, includeEqualWithSuffix = true): string[] => {
     let node: any = root;
     const matches: string[] = [];
     let token: string;
diff --git a/Build/mod.d.ts b/Build/mod.d.ts
index 1f3c0032..5e6e1299 100644
--- a/Build/mod.d.ts
+++ b/Build/mod.d.ts
@@ -2,33 +2,33 @@ declare module 'gorhill-publicsuffixlist' {
   type Selfie =
     | string
     | {
-      magic: number;
-      buf32: number[];
-    };
+      magic: number,
+      buf32: number[]
+    };
 
   interface Decoder {
-    decode: (bufferStr: string, buffer: ArrayBuffer) => void;
-    decodeSize: (bufferStr: string) => number;
+    decode: (bufferStr: string, buffer: ArrayBuffer) => void,
+    decodeSize: (bufferStr: string) => number
   }
 
   interface Encoder {
-    encode: (buffer: ArrayBuffer, length: number) => string;
+    encode: (buffer: ArrayBuffer, length: number) => string
   }
 
   export interface PublicSuffixList {
-    version: string;
+    version: string,
 
-    parse(text: string, toAscii: (input: string) => string): void;
+    parse(text: string, toAscii: (input: string) => string): void,
 
-    getPublicSuffix(hostname: string): string;
-    getDomain(hostname: string): string;
+    getPublicSuffix(hostname: string): string,
+    getDomain(hostname: string): string,
 
-    suffixInPSL(hostname: string): boolean;
+    suffixInPSL(hostname: string): boolean,
 
-    toSelfie(encoder?: null | Encoder): Selfie;
-    fromSelfie(selfie: Selfie, decoder?: null | Decoder): boolean;
+    toSelfie(encoder?: null | Encoder): Selfie,
+    fromSelfie(selfie: Selfie, decoder?: null | Decoder): boolean,
 
     enableWASM(options?: {
-      customFetch?: null | ((url: URL) => Promise);
-    }): Promise;
-    disableWASM(): Promise;
+      customFetch?: null | ((url: URL) => Promise)
+    }): Promise,
+    disableWASM(): Promise
   }
 
   const psl: PublicSuffixList;
diff --git a/Build/workers/build-internal-reverse-chn-cidr-worker.ts b/Build/workers/build-internal-reverse-chn-cidr-worker.ts
index eef5944f..c692fe00 100644
--- a/Build/workers/build-internal-reverse-chn-cidr-worker.ts
+++ b/Build/workers/build-internal-reverse-chn-cidr-worker.ts
@@ -9,7 +9,7 @@ const handleMessage = async (e: MessageEvent<'build' | 'exit'>) => {
   if (e.data === 'build') {
     const stat = await promise;
     postMessage(stat);
-  } else if (e.data === 'exit') {
+  } else /* if (e.data === 'exit') */ {
     self.removeEventListener('message', handleMessage);
     self.unref();
     self.terminate();
diff --git a/bun.lockb b/bun.lockb
index dc6d9eba..d7b3132c 100755
Binary files a/bun.lockb and b/bun.lockb differ
diff --git a/eslint.config.js b/eslint.config.js
index fe1c67b3..69409992 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -5,7 +5,7 @@ module.exports = require('eslint-config-sukka').sukka({
   disableNoConsoleInCLI: ['Build/**'],
   env: {
     customGlobals: {
-      'Bun': 'readonly'
+      Bun: 'readonly'
     }
   }
 },
diff --git a/package.json b/package.json
index a2e9e1e4..9998233f 100644
--- a/package.json
+++ b/package.json
@@ -31,14 +31,14 @@
     "tldts": "^6.0.22"
   },
   "devDependencies": {
-    "@eslint-sukka/node": "4.1.9",
-    "@eslint-sukka/ts": "4.1.9",
+    "@eslint-sukka/node": "4.1.10-beta.2",
+    "@eslint-sukka/ts": "4.1.10-beta.2",
     "@types/async-retry": "^1.4.8",
     "@types/mocha": "10.0.2",
     "@types/tar": "^6.1.9",
     "bun-types": "^1.0.11",
     "chai": "4.3.10",
-    "eslint-config-sukka": "4.1.9",
+    "eslint-config-sukka": "4.1.10-beta.2",
     "eslint-formatter-sukka": "4.1.9",
     "mocha": "^10.2.0",
     "typescript": "^5.2.2"