Remove unused codes

SukkaW
2025-02-04 10:13:01 +08:00
parent fddfa4f318
commit 118991b969
3 changed files with 3 additions and 50 deletions


@@ -6,12 +6,8 @@ import { mkdirSync } from 'node:fs';
 import picocolors from 'picocolors';
 import { fastStringArrayJoin } from 'foxts/fast-string-array-join';
 import { performance } from 'node:perf_hooks';
-import fs from 'node:fs';
-import { simpleStringHash } from 'foxts/simple-string-hash';
 // import type { UndiciResponseData } from './fetch-retry';
-import { CACHE_DIR } from '../constants/dir';
 export interface CacheOptions<S = string> {
   /** Path to sqlite file dir */
   cachePath?: string,
@@ -35,32 +31,6 @@ interface CacheApplyNonRawOption<T, S> extends CacheApplyRawOption {
 export type CacheApplyOption<T, S> = T extends S ? CacheApplyRawOption : CacheApplyNonRawOption<T, S>;
-const randomInt = (min: number, max: number) => Math.floor(Math.random() * (max - min + 1)) + min;
-const ONE_HOUR = 60 * 60 * 1000;
-const ONE_DAY = 24 * ONE_HOUR;
-// Add some randomness to the cache ttl to avoid thundering herd
-export const TTL = {
-  useHttp304: Symbol('useHttp304'),
-  humanReadable(ttl: number) {
-    if (ttl >= ONE_DAY) {
-      return `${Math.round(ttl / 24 / 60 / 60 / 1000)}d`;
-    }
-    if (ttl >= 60 * 60 * 1000) {
-      return `${Math.round(ttl / 60 / 60 / 1000)}h`;
-    }
-    return `${Math.round(ttl / 1000)}s`;
-  },
-  THREE_HOURS: () => randomInt(1, 3) * ONE_HOUR,
-  TWLVE_HOURS: () => randomInt(8, 12) * ONE_HOUR,
-  ONE_DAY: () => randomInt(23, 25) * ONE_HOUR,
-  ONE_WEEK_STATIC: ONE_DAY * 7,
-  THREE_DAYS: () => randomInt(1, 3) * ONE_DAY,
-  ONE_WEEK: () => randomInt(4, 7) * ONE_DAY,
-  TEN_DAYS: () => randomInt(7, 10) * ONE_DAY,
-  TWO_WEEKS: () => randomInt(10, 14) * ONE_DAY
-};
 export class Cache<S = string> {
   private db: Database;
   /** Time before deletion */
@@ -186,21 +156,11 @@ export class Cache<S = string> {
   }
 }
-// drop deprecated cache
-new Cache({ cachePath: CACHE_DIR }).deleteTable('cache');
 // process.on('exit', () => {
 // fsFetchCache.destroy();
 // });
 const separator = '\u0000';
-export const serializeSet = (set: Set<string>) => fastStringArrayJoin(Array.from(set), separator);
-export const deserializeSet = (str: string) => new Set(str.split(separator));
 export const serializeArray = (arr: string[]) => fastStringArrayJoin(arr, separator);
 export const deserializeArray = (str: string) => str.split(separator);
-const getFileContentHash = (filename: string) => simpleStringHash(fs.readFileSync(filename, 'utf-8'));
-export function createCacheKey(filename: string) {
-  const fileHash = getFileContentHash(filename);
-  return (key: string) => key + '$' + fileHash + '$';
-}
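
For context, the TTL helpers deleted above implemented a common cache-jitter pattern: each TTL is a function returning a slightly randomized duration, so entries written in the same run do not all expire at the same instant (the thundering-herd problem the removed comment mentions). A minimal self-contained sketch of that idea, not tied to this project's Cache API:

// Illustrative sketch of the jittered-TTL pattern used by the removed helpers.
const randomInt = (min: number, max: number) => Math.floor(Math.random() * (max - min + 1)) + min;

const ONE_HOUR = 60 * 60 * 1000;

// Instead of a fixed 24h TTL, pick 23-25h per entry so expirations spread out
// and a refresh does not hit every upstream source at once.
const oneDayWithJitter = () => randomInt(23, 25) * ONE_HOUR;

console.log(`ttl ≈ ${Math.round(oneDayWithJitter() / ONE_HOUR)}h`);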


@@ -1,4 +1,4 @@
-import { readFileByLine, readFileByLineNew } from './fetch-text-by-line';
+import { readFileByLine/* , readFileByLineNew */ } from './fetch-text-by-line';
 import path from 'node:path';
 import fsp from 'node:fs/promises';
 import { OUTPUT_SURGE_DIR } from '../constants/dir';
@@ -10,7 +10,7 @@ const file = path.join(OUTPUT_SURGE_DIR, 'domainset/reject_extra.conf');
 group(() => {
   bench('readFileByLine', () => Array.fromAsync(readFileByLine(file)));
-  bench('readFileByLineNew', async () => Array.fromAsync(await readFileByLineNew(file)));
+  // bench('readFileByLineNew', async () => Array.fromAsync(await readFileByLineNew(file)));
   bench('fsp.readFile', () => fsp.readFile(file, 'utf-8').then((content) => content.split('\n')));
 });


@@ -1,6 +1,4 @@
 import fs from 'node:fs';
-import fsp from 'node:fs/promises';
-import type { FileHandle } from 'node:fs/promises';
 import readline from 'node:readline';
 import { TextLineStream } from './text-line-transform-stream';
@@ -19,11 +17,6 @@ export function readFileByLine(file: string): AsyncIterable<string> {
   });
 }
-const fdReadLines = (fd: FileHandle) => fd.readLines();
-export async function readFileByLineNew(file: string): Promise<AsyncIterable<string>> {
-  return fsp.open(file, 'r').then(fdReadLines);
-}
 export const createReadlineInterfaceFromResponse: ((resp: UndiciResponseData | UnidiciWebResponse, processLine?: boolean) => ReadableStream<string>) = (resp, processLine = false) => {
   invariant(resp.body, 'Failed to fetch remote text');
   if ('bodyUsed' in resp && resp.bodyUsed) {