Make ESLint Happy

SukkaW
2023-12-03 02:04:09 +08:00
parent 34ef0e58ff
commit d4ff4c5b2d
36 changed files with 136 additions and 140 deletions

View File

@@ -1,12 +1,12 @@
 interface Node {
   /** @default 0 */
-  depth?: number;
-  key: string;
+  depth?: number,
+  key: string,
   /** @default false */
-  word?: boolean;
-  children: Record<string, Node>;
-  fail?: Node;
-  count: number;
+  word?: boolean,
+  children: Record<string, Node>,
+  fail?: Node,
+  count: number
 }
 const createNode = (key: string, depth = 0): Node => ({
@@ -31,15 +31,15 @@ const createKeywordFilter = (keys: string[] | Set<string>) => {
     const map = beginNode.children;
     // eslint-disable-next-line guard-for-in -- plain object
     for (const key in beginNode.children) {
-      const node = map?.[key];
+      const node = map[key];
       let failNode = beginNode.fail;
-      while (failNode && !failNode.children?.[key]) {
+      while (failNode && !failNode.children[key]) {
         failNode = failNode.fail;
       }
       if (node) {
-        node.fail = failNode?.children?.[key] || root;
+        node.fail = failNode?.children[key] || root;
         queue.push(node);
       }
@@ -86,8 +86,8 @@ const createKeywordFilter = (keys: string[] | Set<string>) => {
     // const key = text.charAt(i);
     const key = text[i];
-    while (node && !node?.children[key]) {
-      node = node?.fail;
+    while (node && !node.children[key]) {
+      node = node.fail;
     }
     node = node?.children[key] || root;
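
Most of the edits in this file drop optional chaining that the types already make redundant: `children` is declared as a non-optional `Record<string, Node>`, so guarding the object itself with `?.` is dead code (the lookup can still yield `undefined` for a missing key, which the remaining checks handle). A minimal illustrative sketch, not taken from the repository:

    interface TrieNode {
      children: Record<string, TrieNode>; // non-optional: always an object
    }

    // `node.children` can never be nullish per the type above, so plain indexing
    // is enough; only the *result* of the lookup may be undefined.
    const getChild = (node: TrieNode, key: string): TrieNode | undefined => node.children[key];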

View File

@@ -1,6 +1,6 @@
-import tldts from 'tldts';
+import * as tldts from 'tldts';
 import { createCache } from './cache-apply';
-import { PublicSuffixList } from 'gorhill-publicsuffixlist';
+import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
 const cache = createCache('cached-tld-parse', true);
@@ -12,6 +12,6 @@ let gothillGetDomainCache: ReturnType<typeof createCache> | null = null;
 export const createCachedGorhillGetDomain = (gorhill: PublicSuffixList) => {
   return (domain: string) => {
     gothillGetDomainCache ??= createCache('cached-gorhill-get-domain', true);
-    return gothillGetDomainCache.sync(domain, () => gorhill.getDomain(domain[0] === '.' ? domain.slice(1) : domain))
+    return gothillGetDomainCache.sync(domain, () => gorhill.getDomain(domain[0] === '.' ? domain.slice(1) : domain));
   };
 };
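
Two import-style changes here: tldts ships named exports (`parse`, `getDomain`, …) rather than a default export, so the namespace form keeps existing `tldts.parse(...)` call sites working, and `PublicSuffixList` is only referenced in type positions, so `import type` lets the import be erased from the emitted JavaScript. A small sketch of the same pattern (illustrative, not the file's actual code):

    import * as tldts from 'tldts';                                    // namespace import instead of a default import
    import type { PublicSuffixList } from 'gorhill-publicsuffixlist';  // erased at compile time

    console.log(tldts.parse('blog.example.co.uk').domain);             // 'example.co.uk'

    // Only ever used as a type, so no runtime dependency is introduced here.
    export const useList = (psl: PublicSuffixList) => psl;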

View File

@@ -1,5 +1,5 @@
 export const SHARED_DESCRIPTION = [
   'License: AGPL 3.0',
   'Homepage: https://ruleset.skk.moe',
-  'GitHub: https://github.com/SukkaW/Surge',
+  'GitHub: https://github.com/SukkaW/Surge'
 ] as const;

View File

@@ -85,7 +85,7 @@ export const createRuleset = (
       _clashContent = surgeRulesetToClashClassicalTextRuleset(content);
       break;
     default:
-      throw new TypeError(`Unknown type: ${type}`);
+      throw new TypeError(`Unknown type: ${type as any}`);
   }
   const clashContent = withBannerArray(title, description, date, _clashContent);

View File

@@ -1,4 +1,4 @@
-import createTrie from './trie';
+import { createTrie } from './trie';
 export const domainDeduper = (inputDomains: string[]): string[] => {
   const trie = createTrie(inputDomains);

View File

@@ -9,11 +9,32 @@ const FACTOR = 6;
 function isClientError(err: any): err is NodeJS.ErrnoException {
   if (!err) return false;
   return (
-    err.code === 'ERR_UNESCAPED_CHARACTERS' ||
-    err.message === 'Request path contains unescaped characters'
+    err.code === 'ERR_UNESCAPED_CHARACTERS'
+    || err.message === 'Request path contains unescaped characters'
   );
 }
+export class ResponseError extends Error {
+  readonly res: Response;
+  readonly code: number;
+  readonly statusCode: number;
+  readonly url: string;
+  constructor(res: Response) {
+    super(res.statusText);
+    if ('captureStackTrace' in Error) {
+      Error.captureStackTrace(this, ResponseError);
+    }
+    this.name = this.constructor.name;
+    this.res = res;
+    this.code = res.status;
+    this.statusCode = res.status;
+    this.url = res.url;
+  }
+}
 interface FetchRetryOpt {
   minTimeout?: number,
   retries?: number,
@@ -32,7 +53,7 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
       minTimeout: MIN_TIMEOUT,
       retries: MAX_RETRIES,
       factor: FACTOR,
-      maxRetryAfter: MAX_RETRY_AFTER,
+      maxRetryAfter: MAX_RETRY_AFTER
     },
     opts.retry
   );
@@ -41,19 +62,18 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
     return await retry(async (bail) => {
       try {
         // this will be retried
-        const res = await $fetch(url, opts);
+        const res = (await $fetch(url, opts)) as Response;
         if ((res.status >= 500 && res.status < 600) || res.status === 429) {
           // NOTE: doesn't support http-date format
           const retryAfterHeader = res.headers.get('retry-after');
           if (retryAfterHeader) {
-            const retryAfter = parseInt(retryAfterHeader, 10);
+            const retryAfter = Number.parseInt(retryAfterHeader, 10);
             if (retryAfter) {
               if (retryAfter > retryOpts.maxRetryAfter) {
                 return res;
-              } else {
-                await new Promise((r) => setTimeout(r, retryAfter * 1e3));
               }
+              await Bun.sleep(retryAfter * 1e3);
             }
           }
           throw new ResponseError(res);
@@ -78,7 +98,7 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
         }
         throw err;
       }
-    }
+    };
     for (const k of Object.keys($fetch)) {
       const key = k as keyof typeof $fetch;
@@ -88,30 +108,10 @@ function createFetchRetry($fetch: typeof fetch): typeof fetch {
   return fetchRetry as typeof fetch;
 }
-export class ResponseError extends Error {
-  readonly res: Response;
-  readonly code: number;
-  readonly statusCode: number;
-  readonly url: string;
-  constructor(res: Response) {
-    super(res.statusText);
-    if (Error.captureStackTrace) {
-      Error.captureStackTrace(this, ResponseError);
-    }
-    this.name = this.constructor.name;
-    this.res = res;
-    this.code = this.statusCode = res.status;
-    this.url = res.url;
-  }
-}
 export const defaultRequestInit: RequestInit = {
   headers: {
     'User-Agent': 'curl/8.1.2 (https://github.com/SukkaW/Surge)'
   }
-}
+};
 export const fetchWithRetry = createFetchRetry(fetch);
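
In the retry-after branch above, the hand-rolled `setTimeout` promise is replaced with `Bun.sleep`, which returns a promise that resolves after the given number of milliseconds. The two forms below are equivalent pauses; the second is Bun-specific (sketch for illustration only):

    // Portable: wrap setTimeout in a Promise manually.
    await new Promise<void>((resolve) => setTimeout(resolve, 1500));

    // Bun runtime only: Bun.sleep(ms) resolves after roughly 1500 ms.
    await Bun.sleep(1500);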

View File

@@ -7,7 +7,7 @@ import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
 const publicSuffixPath = path.resolve(import.meta.dir, '../../node_modules/.cache/public_suffix_list_dat.txt');
 const getGorhillPublicSuffix = () => traceAsync('create gorhill public suffix instance', async () => {
-  const customFetch = async (url: string | URL) => Bun.file(url);
+  const customFetch = (url: string | URL) => Promise.resolve(Bun.file(url));
   const publicSuffixFile = Bun.file(publicSuffixPath);

View File

@@ -1,8 +1,8 @@
 import { fetchRemoteTextAndCreateReadlineInterface } from './fetch-remote-text-by-line';
-import tldts from 'tldts';
+import { parse } from 'tldts';
 const isDomainLoose = (domain: string): boolean => {
-  const { isIcann, isPrivate, isIp } = tldts.parse(domain);
+  const { isIcann, isPrivate, isIp } = parse(domain);
   return !!(!isIp && (isIcann || isPrivate));
 };

View File

@@ -8,7 +8,7 @@ import { performance } from 'perf_hooks';
 import { getGorhillPublicSuffixPromise } from './get-gorhill-publicsuffix';
 import type { PublicSuffixList } from 'gorhill-publicsuffixlist';
-const DEBUG_DOMAIN_TO_FIND = null; // example.com | null
+const DEBUG_DOMAIN_TO_FIND: string | null = null; // example.com | null
 let foundDebugDomain = false;
 const warnOnceUrl = new Set<string>();
@@ -63,7 +63,7 @@ export async function processDomainLists(domainListsUrl: string | URL) {
 }
 export async function processHosts(hostsUrl: string | URL, includeAllSubDomain = false, skipDomainCheck = false) {
-  console.time(`- processHosts: ${hostsUrl}`);
+  console.time(`- processHosts: ${hostsUrl.toString()}`);
   if (typeof hostsUrl === 'string') {
     hostsUrl = new URL(hostsUrl);
@@ -95,14 +95,14 @@ export async function processHosts(hostsUrl: string | URL, includeAllSubDomain =
     }
   }
-  console.timeEnd(` - processHosts: ${hostsUrl}`);
+  console.timeEnd(` - processHosts: ${hostsUrl.toString()}`);
   return domainSets;
 }
 export async function processFilterRules(
   filterRulesUrl: string | URL,
-  fallbackUrls?: readonly (string | URL)[] | undefined
+  fallbackUrls?: ReadonlyArray<string | URL> | undefined
 ): Promise<{ white: Set<string>, black: Set<string>, foundDebugDomain: boolean }> {
   const runStart = performance.now();
@@ -167,7 +167,7 @@ export async function processFilterRules(
         addToBlackList(hostname, true);
         break;
       default:
-        throw new Error(`Unknown flag: ${flag}`);
+        throw new Error(`Unknown flag: ${flag as any}`);
     }
   }
 };
@@ -192,7 +192,7 @@ export async function processFilterRules(
     /** @type string[] */
     filterRules = (
       await Promise.any(
-        [filterRulesUrl, ...(fallbackUrls || [])].map(async url => {
+        [filterRulesUrl, ...fallbackUrls].map(async url => {
          const r = await fetchWithRetry(url, { signal: controller.signal, ...defaultRequestInit });
          const text = await r.text();
@@ -202,7 +202,7 @@ export async function processFilterRules(
       )
     ).split('\n');
   } catch (e) {
-    console.log(`Download Rule for [${filterRulesUrl}] failed`);
+    console.log(`Download Rule for [${filterRulesUrl.toString()}] failed`);
     throw e;
   }
   downloadTime = performance.now() - downloadStart;
@@ -212,7 +212,7 @@ export async function processFilterRules(
     }
   }
-  console.log(` ┬ processFilterRules (${filterRulesUrl}): ${(performance.now() - runStart).toFixed(3)}ms`);
+  console.log(` ┬ processFilterRules (${filterRulesUrl.toString()}): ${(performance.now() - runStart).toFixed(3)}ms`);
   console.log(` └── download time: ${downloadTime.toFixed(3)}ms`);
   return {
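
Several log calls in this file gain an explicit `.toString()`. A `URL` already stringifies sensibly, but a strict template-literal lint rule (presumably `@typescript-eslint/restrict-template-expressions`) only accepts string operands, so the conversion is spelled out. Illustrative sketch:

    const hostsUrl = new URL('https://example.com/hosts.txt');

    console.log(`- processHosts: ${hostsUrl.toString()}`); // explicit string, satisfies the linter
    // console.log(`- processHosts: ${hostsUrl}`);         // flagged: interpolated value is not a string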

View File

@@ -177,7 +177,7 @@ export const PREDEFINED_WHITELIST = [
   // https://raw.githubusercontent.com/AdguardTeam/cname-trackers/master/data/combined_disguised_trackers.txt
   'vlscppe.microsoft.com',
   // OpenAI use this for A/B testing
-  'statsig.com',
+  'statsig.com'
 ];
 export const PREDEFINED_ENFORCED_BACKLIST = [

View File

@@ -1,4 +1,5 @@
 import domainSorter from './stable-sort-domain';
+// eslint-disable-next-line import/no-unresolved -- fuck eslint-import
 import { describe, it, expect } from 'bun:test';
 describe('stable-sort-domain', () => {

View File

@@ -50,6 +50,7 @@ const createDomainSorter = (gorhill: PublicSuffixList | null = null) => {
     };
   }
+  // eslint-disable-next-line @typescript-eslint/no-var-requires -- fuck
   const tldts = require('./cached-tld-parse');
   return (a: string, b: string) => {

View File

@@ -22,7 +22,7 @@ export class PolyfillTextDecoderStream extends TransformStream<Uint8Array, strin
   constructor(
     encoding: Encoding = 'utf-8',
-    { fatal = false, ignoreBOM = false }: ConstructorParameters<typeof TextDecoder>[1] = {},
+    { fatal = false, ignoreBOM = false }: ConstructorParameters<typeof TextDecoder>[1] = {}
   ) {
     const decoder = new TextDecoder(encoding, { fatal, ignoreBOM });
     super({

View File

@@ -4,7 +4,7 @@
 interface TextLineStreamOptions {
   /** Allow splitting by solo \r */
-  allowCR: boolean;
+  allowCR: boolean
 }
 /** Transform a stream into a stream where each chunk is divided by a newline,
@@ -36,8 +36,8 @@ export class TextLineStream extends TransformStream<string, string> {
       const crIndex = chunk.indexOf('\r');
       if (
-        crIndex !== -1 && crIndex !== (chunk.length - 1) &&
-        (lfIndex === -1 || (lfIndex - 1) > crIndex)
+        crIndex !== -1 && crIndex !== (chunk.length - 1)
+        && (lfIndex === -1 || (lfIndex - 1) > crIndex)
       ) {
         controller.enqueue(chunk.slice(0, crIndex));
         chunk = chunk.slice(crIndex + 1);
@@ -62,13 +62,14 @@ export class TextLineStream extends TransformStream<string, string> {
       },
       flush(controller) {
         if (__buf.length > 0) {
+          // eslint-disable-next-line sukka-ts/string/prefer-string-starts-ends-with -- performance
           if (allowCR && __buf[__buf.length - 1] === '\r') {
             controller.enqueue(__buf.slice(0, -1));
           } else {
             controller.enqueue(__buf);
-          };
+          }
         }
-      },
+      }
     });
   }
 }

View File

@@ -19,9 +19,9 @@ const traceAsync = async <T>(prefix: string, fn: () => Promise<T>): Promise<T> =
 export { traceAsync };
 export interface TaskResult {
-  readonly start: number;
-  readonly end: number;
-  readonly taskName: string;
+  readonly start: number,
+  readonly end: number,
+  readonly taskName: string
 }
 const task = <T>(importMetaPath: string, fn: () => Promise<T>, customname: string | null = null) => {

View File

@@ -1,4 +1,5 @@
-import createTrie from './trie';
+import { createTrie } from './trie';
+// eslint-disable-next-line import/no-unresolved -- fuck eslint-import
 import { describe, expect, it } from 'bun:test';
 describe('Trie', () => {

View File

@@ -8,7 +8,7 @@ export const SENTINEL: string = String.fromCodePoint(0);
  * @param {string[] | Set<string>} [from]
  */
 export const createTrie = (from?: string[] | Set<string>) => {
-  let size: number = 0;
+  let size = 0;
   const root: any = {};
   /**
@@ -56,7 +56,7 @@ export const createTrie = (from?: string[] | Set<string>) => {
    * @param {boolean} [includeEqualWithSuffix]
    * @return {string[]}
    */
-  const find = (suffix: string, includeEqualWithSuffix: boolean = true): string[] => {
+  const find = (suffix: string, includeEqualWithSuffix = true): string[] => {
     let node: any = root;
     const matches: string[] = [];
     let token: string;
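
The last two hunks remove annotations that TypeScript already infers from an initializer or default value (the style usually enforced by `@typescript-eslint/no-inferrable-types`; assumed here). A generic sketch of the same idea, not the repository's code:

    let size = 0;                                                                     // inferred as number; `: number` is redundant
    const greet = (name: string, loud = true) => (loud ? name.toUpperCase() : name);  // `loud` inferred as boolean from its default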