Mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-13 01:30:37 +08:00)
Refactor: migrate to undici fetch
commit f51cea7547
parent c94e28b2b7
@@ -105,7 +105,8 @@ setGlobalDispatcher(agent.compose(
   }),
   interceptors.cache({
     store: new BetterSqlite3CacheStore({
-      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db')
+      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db'),
+      maxEntrySize: 1024 * 1024 * 50 // 50 MiB
     })
   })
 ));
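
Note: only the cache store options change in this hunk. For context, a minimal sketch of the dispatcher setup the hunk header refers to, assuming undici >= 7 (which ships interceptors.cache and Dispatcher#compose) and treating BetterSqlite3CacheStore as a local CacheStore implementation mirroring the location/maxEntrySize options of undici's built-in SqliteCacheStore. The Agent options and the store's import path below are assumptions, not taken from this commit:

import path from 'node:path';
import { Agent, setGlobalDispatcher, interceptors } from 'undici';
import { BetterSqlite3CacheStore } from './better-sqlite3-cache-store'; // hypothetical path

const agent = new Agent({ connections: 32 }); // connection cap is an assumption

setGlobalDispatcher(agent.compose(
  interceptors.cache({
    store: new BetterSqlite3CacheStore({
      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db'),
      maxEntrySize: 1024 * 1024 * 50 // refuse to cache any response body over 50 MiB
    })
  })
));

Capping maxEntrySize keeps a single oversized download from bloating the on-disk SQLite cache.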
@@ -8,7 +8,6 @@ import { TextLineStream } from './text-line-transform-stream';
 import type { ReadableStream } from 'node:stream/web';
 import { TextDecoderStream } from 'node:stream/web';
 import { processLine, ProcessLineStream } from './process-line';
-import type { NodeFetchResponse } from './make-fetch-happen';
 import { $$fetch } from './fetch-retry';
 import type { UndiciResponseData } from './fetch-retry';
 import type { Response as UnidiciWebResponse } from 'undici';
@@ -38,7 +37,7 @@ export async function readFileByLineNew(file: string): Promise<AsyncIterable<str
   return fsp.open(file, 'r').then(fdReadLines);
 }
 
-function ensureResponseBody<T extends NodeFetchResponse | UndiciResponseData | UnidiciWebResponse>(resp: T): NonNullable<T['body']> {
+function ensureResponseBody<T extends UndiciResponseData | UnidiciWebResponse>(resp: T): NonNullable<T['body']> {
   if (resp.body == null) {
     throw new Error('Failed to fetch remote text');
   }
@@ -48,7 +47,7 @@ function ensureResponseBody<T extends NodeFetchResponse | UndiciResponseData | U
   return resp.body;
 }
 
-export const createReadlineInterfaceFromResponse: ((resp: NodeFetchResponse | UndiciResponseData | UnidiciWebResponse, processLine?: boolean) => ReadableStream<string>) = (resp, processLine = false) => {
+export const createReadlineInterfaceFromResponse: ((resp: UndiciResponseData | UnidiciWebResponse, processLine?: boolean) => ReadableStream<string>) = (resp, processLine = false) => {
   const stream = ensureResponseBody(resp);
 
   const webStream: ReadableStream<Uint8Array> = 'getReader' in stream
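
Note: the 'getReader' in stream check where this hunk ends is how the helper keeps supporting both body shapes that remain after NodeFetchResponse is dropped: undici's request() (UndiciResponseData) yields a Node.js Readable, while undici's fetch() Response carries a WHATWG ReadableStream. A sketch of that normalization, assuming Node.js >= 17 for Readable.toWeb (the helper names here are illustrative, not the repo's):

import { Readable } from 'node:stream';
import { TextDecoderStream } from 'node:stream/web';
import type { ReadableStream } from 'node:stream/web';
import { TextLineStream } from './text-line-transform-stream';

// Normalize either body shape into a WHATWG byte stream
function toWebStream(body: Readable | ReadableStream<Uint8Array>): ReadableStream<Uint8Array> {
  return 'getReader' in body
    ? body // undici fetch() Response body is already a WHATWG stream
    : Readable.toWeb(body) as ReadableStream<Uint8Array>; // undici request() yields a Node Readable
}

// Decode bytes to text, then split into lines (TextLineStream is the repo's own transform)
const lineStream = (body: Readable | ReadableStream<Uint8Array>) =>
  toWebStream(body).pipeThrough(new TextDecoderStream()).pipeThrough(new TextLineStream());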
@@ -1,86 +1,86 @@
 import path from 'node:path';
 import fs from 'node:fs';
-import makeFetchHappen from 'make-fetch-happen';
-import type { FetchOptions } from 'make-fetch-happen';
-import cacache from 'cacache';
-import picocolors from 'picocolors';
 // eslint-disable-next-line @typescript-eslint/no-restricted-imports -- type only
 import type { Response as NodeFetchResponse } from 'node-fetch';
-import { task } from '../trace';
-import { bytes } from 'xbits';
+import fsp from 'node:fs/promises';
+// import makeFetchHappen from 'make-fetch-happen';
+// import type { FetchOptions } from 'make-fetch-happen';
+// import cacache from 'cacache';
+// import picocolors from 'picocolors';
 
 export type { NodeFetchResponse };
+import { task } from '../trace';
+// import { bytes } from 'xbits';
 
 const cachePath = path.resolve(__dirname, '../../.cache/__make_fetch_happen__');
-fs.mkdirSync(cachePath, { recursive: true });
+// fs.mkdirSync(cachePath, { recursive: true });
 
-interface CacacheVerifyStats {
-  startTime: Date,
-  endTime: Date,
-  runTime: {
-    markStartTime: 0,
-    fixPerms: number,
-    garbageCollect: number,
-    rebuildIndex: number,
-    cleanTmp: number,
-    writeVerifile: number,
-    markEndTime: number,
-    total: number
-  },
-  verifiedContent: number,
-  reclaimedCount: number,
-  reclaimedSize: number,
-  badContentCount: number,
-  keptSize: number,
-  missingContent: number,
-  rejectedEntries: number,
-  totalEntries: number
-}
+// interface CacacheVerifyStats {
+//   startTime: Date,
+//   endTime: Date,
+//   runTime: {
+//     markStartTime: 0,
+//     fixPerms: number,
+//     garbageCollect: number,
+//     rebuildIndex: number,
+//     cleanTmp: number,
+//     writeVerifile: number,
+//     markEndTime: number,
+//     total: number
+//   },
+//   verifiedContent: number,
+//   reclaimedCount: number,
+//   reclaimedSize: number,
+//   badContentCount: number,
+//   keptSize: number,
+//   missingContent: number,
+//   rejectedEntries: number,
+//   totalEntries: number
+// }
 
 export const cacheGc = task(require.main === module, __filename)(
-  (span) => span
-    .traceChildAsync('cacache gc', () => cacache.verify(cachePath, { concurrency: 64 }))
-    .then((stats: CacacheVerifyStats) => {
-      // console.log({ stats });
-      console.log(picocolors.green('[cacheGc] running gc on cache:'), cachePath);
-      console.log(picocolors.green('[cacheGc] content verified:'), stats.verifiedContent, '(' + bytes(stats.keptSize) + ')');
-      console.log(picocolors.green('[cacheGc] reclaimed:'), stats.reclaimedCount, '(' + bytes(stats.reclaimedSize) + ')');
-    })
+  () => fsp.rm(cachePath, { recursive: true, force: true })
+  // span
+  //   .traceChildAsync('cacache gc', () => cacache.verify(cachePath, { concurrency: 64 }))
+  //   .then((stats: CacacheVerifyStats) => {
+  //     // console.log({ stats });
+  //     console.log(picocolors.green('[cacheGc] running gc on cache:'), cachePath);
+  //     console.log(picocolors.green('[cacheGc] content verified:'), stats.verifiedContent, '(' + bytes(stats.keptSize) + ')');
+  //     console.log(picocolors.green('[cacheGc] reclaimed:'), stats.reclaimedCount, '(' + bytes(stats.reclaimedSize) + ')');
+  //   });
 );
 
-const _fetch = makeFetchHappen.defaults({
-  cachePath,
-  maxSockets: 32, /**
-                   * They said 15 is a good default that prevents knocking out others' routers,
-                   * I disagree. 32 is a good number.
-                   */
-  headers: {
-    'User-Agent': 'curl/8.9.1 (https://github.com/SukkaW/Surge)'
-  },
-  retry: {
-    retries: 5,
-    randomize: true
-  }
-});
+// const _fetch = makeFetchHappen.defaults({
+//   cachePath,
+//   maxSockets: 32, /**
+//                    * They said 15 is a good default that prevents knocking out others' routers,
+//                    * I disagree. 32 is a good number.
+//                    */
+//   headers: {
+//     'User-Agent': 'curl/8.9.1 (https://github.com/SukkaW/Surge)'
+//   },
+//   retry: {
+//     retries: 5,
+//     randomize: true
+//   }
+// });
 
-export function $fetch(uriOrRequest: string | Request, opts?: FetchOptions) {
-  return _fetch(uriOrRequest, opts).then((resp) => {
-    printResponseStatus(resp);
-    return resp;
-  });
-}
+// export function $fetch(uriOrRequest: string | Request, opts?: FetchOptions) {
+//   return _fetch(uriOrRequest, opts).then((resp) => {
+//     printResponseStatus(resp);
+//     return resp;
+//   });
+// }
 
-export async function $delete(resp: NodeFetchResponse) {
-  const cacheKey = resp.headers.get('X-Local-Cache-Key');
-  if (cacheKey) {
-    await cacache.rm.entry(cachePath, cacheKey);
-    await cacache.verify(cachePath, { concurrency: 64 });
-  }
-}
+// export async function $delete(resp: NodeFetchResponse) {
+//   const cacheKey = resp.headers.get('X-Local-Cache-Key');
+//   if (cacheKey) {
+//     await cacache.rm.entry(cachePath, cacheKey);
+//     await cacache.verify(cachePath, { concurrency: 64 });
+//   }
+// }
 
-export function printResponseStatus(resp: NodeFetchResponse) {
-  const status = resp.headers.get('X-Local-Cache-Status');
-  if (status) {
-    console.log('[$fetch cache]', { status }, picocolors.gray(resp.url));
-  }
-}
+// export function printResponseStatus(resp: NodeFetchResponse) {
+//   const status = resp.headers.get('X-Local-Cache-Status');
+//   if (status) {
+//     console.log('[$fetch cache]', { status }, picocolors.gray(resp.url));
+//   }
+// }
 
+// export { type Response as NodeFetchResponse } from 'node-fetch';
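
Note: with make-fetch-happen and its cacache store commented out, cacheGc degrades to deleting the old on-disk cache directory, and call sites move to $$fetch from './fetch-retry'. The real $$fetch is not shown in this diff; purely as an illustration, a retrying undici wrapper could look like the sketch below. The retry count and backoff are assumptions, loosely mirroring the old retries: 5, randomize: true config:

import { fetch } from 'undici';
import type { RequestInit, Response } from 'undici';
import { setTimeout as sleep } from 'node:timers/promises';

// Illustrative only -- not the repo's actual './fetch-retry' implementation
export async function $$fetch(url: string, init?: RequestInit, retries = 5): Promise<Response> {
  for (let attempt = 0; ; attempt++) {
    try {
      const resp = await fetch(url, init);
      // Treat 5xx as transient; anything else (or the last attempt) is returned as-is
      if (resp.status < 500 || attempt >= retries) return resp;
    } catch (err) {
      if (attempt >= retries) throw err; // network error, out of attempts
    }
    await sleep(2 ** attempt * 100 * (1 + Math.random())); // randomized exponential backoff
  }
}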
@@ -4,7 +4,6 @@ import { createReadlineInterfaceFromResponse } from './fetch-text-by-line';
 // In short, single label domain suffix is ignored due to the size optimization, so no isIcann
 // import tldts from 'tldts-experimental';
 import tldts from 'tldts';
-import type { NodeFetchResponse } from './make-fetch-happen';
 import type { UndiciResponseData } from './fetch-retry';
 import type { Response } from 'undici';
 
@@ -20,7 +19,7 @@ export function extractDomainsFromFelixDnsmasq(line: string): string | null {
   return null;
 }
 
-export async function parseFelixDnsmasqFromResp(resp: NodeFetchResponse | UndiciResponseData | Response): Promise<string[]> {
+export async function parseFelixDnsmasqFromResp(resp: UndiciResponseData | Response): Promise<string[]> {
   const results: string[] = [];
 
   for await (const line of createReadlineInterfaceFromResponse(resp, true)) {
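
Note: the hunk header names extractDomainsFromFelixDnsmasq, which feeds the loop shown here. Purely as an illustration of the format being parsed (the repo's actual implementation may differ), dnsmasq-china-list rules look like server=/example.com/114.114.114.114, and the extraction reduces to:

// Sketch: pull the domain out of a dnsmasq `server=/domain/resolver` rule
// (hypothetical helper name, not the repo's implementation)
function extractDomain(line: string): string | null {
  if (!line.startsWith('server=/')) return null;
  const end = line.indexOf('/', 8); // 8 === 'server=/'.length
  return end === -1 ? null : line.slice(8, end);
}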
@@ -6,8 +6,8 @@ import { parse } from 'csv-parse/sync';
 import { readFileByLine } from './lib/fetch-text-by-line';
 import path from 'node:path';
 import { OUTPUT_SURGE_DIR } from './constants/dir';
-import { $fetch } from './lib/make-fetch-happen';
 import { createRetrieKeywordFilter as createKeywordFilter } from 'foxts/retrie';
+import { $$fetch } from './lib/fetch-retry';
 
 export async function parseGfwList() {
   const whiteSet = new Set<string>();
@@ -22,7 +22,7 @@ export async function parseGfwList() {
     '?'
   ]);
 
-  const text = await (await $fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
+  const text = await (await $$fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
   for (const l of atob(text).split('\n')) {
     const line = processLine(l);
     if (!line) continue;
@@ -60,11 +60,11 @@ export async function parseGfwList() {
       continue;
     }
   }
-  for (const l of (await (await $fetch('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
+  for (const l of (await (await $$fetch('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
     trie.add(l);
   }
 
-  const res = await (await $fetch('https://litter.catbox.moe/sqmgyn.csv', {
+  const res = await (await $$fetch('https://litter.catbox.moe/sqmgyn.csv', {
     headers: {
       accept: '*/*',
       'user-agent': 'curl/8.9.1'
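
Note: all three call sites in this file follow the same drop-in pattern; $$fetch resolves to an undici Response, so the existing (await ...).text() chains are untouched. For instance (sketch; the variable names are illustrative):

const text = await (await $$fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
const rules = atob(text).split('\n'); // gfwlist.txt ships base64-encoded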