Mirror of https://github.com/SukkaW/Surge.git, synced 2025-12-13 01:30:37 +08:00
Refactor: migrate to undici fetch

parent c94e28b2b7
commit f51cea7547
@@ -105,7 +105,8 @@ setGlobalDispatcher(agent.compose(
   }),
   interceptors.cache({
     store: new BetterSqlite3CacheStore({
-      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db')
+      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db'),
+      maxEntrySize: 1024 * 1024 * 50 // 50 MiB
     })
   })
 ));
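For context, a hedged sketch of the dispatcher composition this hunk belongs to. Only the cache-store options are visible in the diff; the Agent construction, the interceptor preceding the cache, and the BetterSqlite3CacheStore import path are assumptions:

```ts
import path from 'node:path';
import { Agent, interceptors, setGlobalDispatcher } from 'undici';
// Assumed import path; the store's module is not visible in this hunk.
import { BetterSqlite3CacheStore } from 'undici-cache-store-better-sqlite3';

const agent = new Agent();

setGlobalDispatcher(agent.compose(
  // Hypothetical: the closing `}),` above the cache interceptor implies at
  // least one other interceptor (e.g. retry) is composed before it.
  interceptors.retry({ maxRetries: 5 }),
  interceptors.cache({
    store: new BetterSqlite3CacheStore({
      location: path.resolve(__dirname, '../../.cache/undici-better-sqlite3-cache-store.db'),
      maxEntrySize: 1024 * 1024 * 50 // skip caching any body larger than 50 MiB
    })
  })
));
```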
@@ -8,7 +8,6 @@ import { TextLineStream } from './text-line-transform-stream';
 import type { ReadableStream } from 'node:stream/web';
 import { TextDecoderStream } from 'node:stream/web';
 import { processLine, ProcessLineStream } from './process-line';
-import type { NodeFetchResponse } from './make-fetch-happen';
 import { $$fetch } from './fetch-retry';
 import type { UndiciResponseData } from './fetch-retry';
 import type { Response as UnidiciWebResponse } from 'undici';
@@ -38,7 +37,7 @@ export async function readFileByLineNew(file: string): Promise<AsyncIterable<string>> {
   return fsp.open(file, 'r').then(fdReadLines);
 }
 
-function ensureResponseBody<T extends NodeFetchResponse | UndiciResponseData | UnidiciWebResponse>(resp: T): NonNullable<T['body']> {
+function ensureResponseBody<T extends UndiciResponseData | UnidiciWebResponse>(resp: T): NonNullable<T['body']> {
   if (resp.body == null) {
     throw new Error('Failed to fetch remote text');
   }
@@ -48,7 +47,7 @@ function ensureResponseBody<T extends NodeFetchResponse | UndiciResponseData | UnidiciWebResponse>(resp: T): NonNullable<T['body']> {
   return resp.body;
 }
 
-export const createReadlineInterfaceFromResponse: ((resp: NodeFetchResponse | UndiciResponseData | UnidiciWebResponse, processLine?: boolean) => ReadableStream<string>) = (resp, processLine = false) => {
+export const createReadlineInterfaceFromResponse: ((resp: UndiciResponseData | UnidiciWebResponse, processLine?: boolean) => ReadableStream<string>) = (resp, processLine = false) => {
   const stream = ensureResponseBody(resp);
 
   const webStream: ReadableStream<Uint8Array> = 'getReader' in stream
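The `'getReader' in stream` test on the last context line is how the function tells a WHATWG ReadableStream (body of undici's web-style Response) apart from the Node.js Readable returned by undici.request. A hedged sketch of the bridging that presumably follows, under that assumption:

```ts
import { Readable } from 'node:stream';
import type { ReadableStream } from 'node:stream/web';

// Sketch: normalize either body shape to a web ReadableStream<Uint8Array>.
// A WHATWG stream has getReader(); a Node Readable does not and must be
// converted via Readable.toWeb() (Node 17+).
function toWebStream(stream: Readable | ReadableStream<Uint8Array>): ReadableStream<Uint8Array> {
  return 'getReader' in stream
    ? stream
    : Readable.toWeb(stream) as ReadableStream<Uint8Array>;
}
```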
@@ -1,86 +1,86 @@
 import path from 'node:path';
-import fs from 'node:fs';
-import makeFetchHappen from 'make-fetch-happen';
-import type { FetchOptions } from 'make-fetch-happen';
-import cacache from 'cacache';
-import picocolors from 'picocolors';
-// eslint-disable-next-line @typescript-eslint/no-restricted-imports -- type only
-import type { Response as NodeFetchResponse } from 'node-fetch';
-import { task } from '../trace';
-import { bytes } from 'xbits';
+import fsp from 'node:fs/promises';
+// import makeFetchHappen from 'make-fetch-happen';
+// import type { FetchOptions } from 'make-fetch-happen';
+// import cacache from 'cacache';
+// import picocolors from 'picocolors';
 
-export type { NodeFetchResponse };
+import { task } from '../trace';
+// import { bytes } from 'xbits';
 
 const cachePath = path.resolve(__dirname, '../../.cache/__make_fetch_happen__');
-fs.mkdirSync(cachePath, { recursive: true });
+// fs.mkdirSync(cachePath, { recursive: true });
 
-interface CacacheVerifyStats {
-  startTime: Date,
-  endTime: Date,
-  runTime: {
-    markStartTime: 0,
-    fixPerms: number,
-    garbageCollect: number,
-    rebuildIndex: number,
-    cleanTmp: number,
-    writeVerifile: number,
-    markEndTime: number,
-    total: number
-  },
-  verifiedContent: number,
-  reclaimedCount: number,
-  reclaimedSize: number,
-  badContentCount: number,
-  keptSize: number,
-  missingContent: number,
-  rejectedEntries: number,
-  totalEntries: number
-}
+// interface CacacheVerifyStats {
+//   startTime: Date,
+//   endTime: Date,
+//   runTime: {
+//     markStartTime: 0,
+//     fixPerms: number,
+//     garbageCollect: number,
+//     rebuildIndex: number,
+//     cleanTmp: number,
+//     writeVerifile: number,
+//     markEndTime: number,
+//     total: number
+//   },
+//   verifiedContent: number,
+//   reclaimedCount: number,
+//   reclaimedSize: number,
+//   badContentCount: number,
+//   keptSize: number,
+//   missingContent: number,
+//   rejectedEntries: number,
+//   totalEntries: number
+// }
 
 export const cacheGc = task(require.main === module, __filename)(
-  (span) => span
-    .traceChildAsync('cacache gc', () => cacache.verify(cachePath, { concurrency: 64 }))
-    .then((stats: CacacheVerifyStats) => {
-      // console.log({ stats });
-      console.log(picocolors.green('[cacheGc] running gc on cache:'), cachePath);
-      console.log(picocolors.green('[cacheGc] content verified:'), stats.verifiedContent, '(' + bytes(stats.keptSize) + ')');
-      console.log(picocolors.green('[cacheGc] reclaimed:'), stats.reclaimedCount, '(' + bytes(stats.reclaimedSize) + ')');
-    })
+  () => fsp.rm(cachePath, { recursive: true, force: true })
+  // span
+  //   .traceChildAsync('cacache gc', () => cacache.verify(cachePath, { concurrency: 64 }))
+  //   .then((stats: CacacheVerifyStats) => {
+  //     // console.log({ stats });
+  //     console.log(picocolors.green('[cacheGc] running gc on cache:'), cachePath);
+  //     console.log(picocolors.green('[cacheGc] content verified:'), stats.verifiedContent, '(' + bytes(stats.keptSize) + ')');
+  //     console.log(picocolors.green('[cacheGc] reclaimed:'), stats.reclaimedCount, '(' + bytes(stats.reclaimedSize) + ')');
+  //   });
 );
 
-const _fetch = makeFetchHappen.defaults({
-  cachePath,
-  maxSockets: 32, /**
-    * They said 15 is a good default that prevents knocking out others' routers,
-    * I disagree. 32 is a good number.
-    */
-  headers: {
-    'User-Agent': 'curl/8.9.1 (https://github.com/SukkaW/Surge)'
-  },
-  retry: {
-    retries: 5,
-    randomize: true
-  }
-});
+// const _fetch = makeFetchHappen.defaults({
+//   cachePath,
+//   maxSockets: 32, /**
+//     * They said 15 is a good default that prevents knocking out others' routers,
+//     * I disagree. 32 is a good number.
+//     */
+//   headers: {
+//     'User-Agent': 'curl/8.9.1 (https://github.com/SukkaW/Surge)'
+//   },
+//   retry: {
+//     retries: 5,
+//     randomize: true
+//   }
+// });
 
-export function $fetch(uriOrRequest: string | Request, opts?: FetchOptions) {
-  return _fetch(uriOrRequest, opts).then((resp) => {
-    printResponseStatus(resp);
-    return resp;
-  });
-}
+// export function $fetch(uriOrRequest: string | Request, opts?: FetchOptions) {
+//   return _fetch(uriOrRequest, opts).then((resp) => {
+//     printResponseStatus(resp);
+//     return resp;
+//   });
+// }
 
-export async function $delete(resp: NodeFetchResponse) {
-  const cacheKey = resp.headers.get('X-Local-Cache-Key');
-  if (cacheKey) {
-    await cacache.rm.entry(cachePath, cacheKey);
-    await cacache.verify(cachePath, { concurrency: 64 });
-  }
-}
+// export async function $delete(resp: NodeFetchResponse) {
+//   const cacheKey = resp.headers.get('X-Local-Cache-Key');
+//   if (cacheKey) {
+//     await cacache.rm.entry(cachePath, cacheKey);
+//     await cacache.verify(cachePath, { concurrency: 64 });
+//   }
+// }
 
-export function printResponseStatus(resp: NodeFetchResponse) {
-  const status = resp.headers.get('X-Local-Cache-Status');
-  if (status) {
-    console.log('[$fetch cache]', { status }, picocolors.gray(resp.url));
-  }
-}
+// export function printResponseStatus(resp: NodeFetchResponse) {
+//   const status = resp.headers.get('X-Local-Cache-Status');
+//   if (status) {
+//     console.log('[$fetch cache]', { status }, picocolors.gray(resp.url));
+//   }
+// }
+
+// export { type Response as NodeFetchResponse } from 'node-fetch';
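Net effect of this hunk: the entire make-fetch-happen layer is commented out, and cacheGc stops verifying a cacache store and instead deletes the now-unused cache directory. A standalone sketch of the new behavior (the repo's task() tracing wrapper is elided here):

```ts
import path from 'node:path';
import fsp from 'node:fs/promises';

const cachePath = path.resolve(__dirname, '../../.cache/__make_fetch_happen__');

// force: true makes rm a no-op once the legacy directory is already gone,
// so the gc task stays idempotent across runs.
export function cacheGc(): Promise<void> {
  return fsp.rm(cachePath, { recursive: true, force: true });
}
```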
@@ -4,7 +4,6 @@ import { createReadlineInterfaceFromResponse } from './fetch-text-by-line';
 // In short, single label domain suffix is ignored due to the size optimization, so no isIcann
 // import tldts from 'tldts-experimental';
 import tldts from 'tldts';
-import type { NodeFetchResponse } from './make-fetch-happen';
 import type { UndiciResponseData } from './fetch-retry';
 import type { Response } from 'undici';
 
@@ -20,7 +19,7 @@ export function extractDomainsFromFelixDnsmasq(line: string): string | null {
   return null;
 }
 
-export async function parseFelixDnsmasqFromResp(resp: NodeFetchResponse | UndiciResponseData | Response): Promise<string[]> {
+export async function parseFelixDnsmasqFromResp(resp: UndiciResponseData | Response): Promise<string[]> {
   const results: string[] = [];
 
   for await (const line of createReadlineInterfaceFromResponse(resp, true)) {
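For reference, Felix's dnsmasq list consists of server=/domain/ip lines. The actual extractDomainsFromFelixDnsmasq body sits above this hunk; the sketch below is a hypothetical reimplementation of that extraction, for illustration only:

```ts
// dnsmasq rules look like: server=/example.com/114.114.114.114
// The domain is the segment between the first pair of slashes.
function extractDomainSketch(line: string): string | null {
  if (!line.startsWith('server=/')) return null;
  const end = line.indexOf('/', 8); // 'server=/'.length === 8
  return end === -1 ? null : line.slice(8, end);
}

console.log(extractDomainSketch('server=/example.com/114.114.114.114')); // 'example.com'
```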
@@ -6,8 +6,8 @@ import { parse } from 'csv-parse/sync';
 import { readFileByLine } from './lib/fetch-text-by-line';
 import path from 'node:path';
 import { OUTPUT_SURGE_DIR } from './constants/dir';
-import { $fetch } from './lib/make-fetch-happen';
 import { createRetrieKeywordFilter as createKeywordFilter } from 'foxts/retrie';
+import { $$fetch } from './lib/fetch-retry';
 
 export async function parseGfwList() {
   const whiteSet = new Set<string>();
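This file now imports $$fetch from ./lib/fetch-retry in place of the cacache-backed $fetch. The real fetch-retry module is not part of this diff; a minimal sketch of the shape such a wrapper typically has, purely as an assumption:

```ts
import { fetch, type RequestInit, type Response } from 'undici';

// Hypothetical sketch of a retrying fetch wrapper; the actual $$fetch in
// './lib/fetch-retry' may differ. Retries network errors and 5xx responses
// with randomized exponential backoff.
export async function $$fetchSketch(url: string, init?: RequestInit, retries = 5): Promise<Response> {
  for (let attempt = 0; ; attempt++) {
    try {
      const resp = await fetch(url, init);
      if (resp.status < 500 || attempt >= retries) return resp;
    } catch (err) {
      if (attempt >= retries) throw err;
    }
    const delay = 2 ** attempt * 100 * (0.5 + Math.random()); // ~100ms * 2^attempt, jittered
    await new Promise((resolve) => setTimeout(resolve, delay));
  }
}
```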
@@ -22,7 +22,7 @@ export async function parseGfwList() {
     '?'
   ]);
 
-  const text = await (await $fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
+  const text = await (await $$fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
   for (const l of atob(text).split('\n')) {
     const line = processLine(l);
     if (!line) continue;
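gfwlist.txt is distributed base64-encoded, which is why the body goes through atob() before being split into lines. In isolation (using Node's global fetch and atob, Node 18+; dropping '!' comment lines is an assumption about what processLine filters):

```ts
const encoded = await (await fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
// Decode, then drop blank lines and ABP-style '!' comment lines.
const rules = atob(encoded)
  .split('\n')
  .filter((line) => line !== '' && !line.startsWith('!'));
console.log(rules.length, 'rules');
```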
@@ -60,11 +60,11 @@ export async function parseGfwList() {
       continue;
     }
   }
-  for (const l of (await (await $fetch('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
+  for (const l of (await (await $$fetch('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
     trie.add(l);
   }
 
-  const res = await (await $fetch('https://litter.catbox.moe/sqmgyn.csv', {
+  const res = await (await $$fetch('https://litter.catbox.moe/sqmgyn.csv', {
     headers: {
       accept: '*/*',
       'user-agent': 'curl/8.9.1'