Mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-12 17:20:35 +08:00)
Refactor: replace fetch with undici.request
commit e0f7e7e48a (parent ac470d4af9)
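
Before the diff itself, a rough, non-authoritative sketch of what moving from undici's WHATWG fetch to undici.request looks like at a call site, based only on the hunks below (the function name and option shapes here are illustrative assumptions, not code from the repository):

// Sketch only: how a fetch-style call maps onto undici.request,
// as suggested by the changes in this commit.
import { request } from 'undici';

async function sketchConditionalRequest(url: string, cachedETag?: string) {
  const res = await request(url, {
    // headers are passed as a plain object
    headers: cachedETag ? { 'If-None-Match': cachedETag } : {}
  });

  // fetch's res.status / res.ok become res.statusCode
  if (res.statusCode >= 400) {
    throw new Error('HTTP ' + res.statusCode);
  }

  // fetch's res.headers (a Headers instance) becomes a plain
  // IncomingHttpHeaders object with lower-cased keys
  const etag = res.headers.etag;

  // fetch's res.text() becomes res.body.text()
  const text = await res.body.text();

  return { etag, text, notModified: res.statusCode === 304 };
}
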
@@ -8,10 +8,12 @@ import { fastStringArrayJoin, identity, mergeHeaders } from './misc';
 import { performance } from 'node:perf_hooks';
 import fs from 'node:fs';
 import { stringHash } from './string-hash';
-import { defaultRequestInit, fetchWithLog, ResponseError } from './fetch-retry';
+import { defaultRequestInit, requestWithLog, UndiciResponseError } from './fetch-retry';
+import type { UndiciResponseData } from './fetch-retry';
 import { Custom304NotModifiedError, CustomAbortError, CustomNoETagFallbackError, fetchAssetsWith304, sleepWithAbort } from './fetch-assets';

-import type { Response, RequestInit, HeadersInit } from 'undici';
+import type { HeadersInit } from 'undici';
+import type { IncomingHttpHeaders } from 'undici/types/header';

 const enum CacheStatus {
   Hit = 'hit',
@@ -68,6 +70,16 @@ export const TTL = {
   TWO_WEEKS: () => randomInt(10, 14) * ONE_DAY
 };

+function ensureETag(headers: IncomingHttpHeaders) {
+  if ('etag' in headers && typeof headers.etag === 'string' && headers.etag.length > 0) {
+    return headers.etag;
+  }
+  if ('ETag' in headers && typeof headers.ETag === 'string' && headers.ETag.length > 0) {
+    return headers.ETag;
+  }
+  return '';
+}
+
 export class Cache<S = string> {
   db: Database;
   /** Time before deletion */
@@ -213,12 +225,12 @@ export class Cache<S = string> {
   async applyWithHttp304<T>(
     url: string,
     extraCacheKey: string,
-    fn: (resp: Response) => Promise<T>,
-    opt: Omit<CacheApplyOption<T, S>, 'incrementTtlWhenHit'>,
-    requestInit?: RequestInit
+    fn: (resp: UndiciResponseData) => Promise<T>,
+    opt: Omit<CacheApplyOption<T, S>, 'incrementTtlWhenHit'>
+    // requestInit?: RequestInit
   ): Promise<T> {
     if (opt.temporaryBypass) {
-      return fn(await fetchWithLog(url, requestInit));
+      return fn(await requestWithLog(url));
     }

     const baseKey = url + '$' + extraCacheKey;
@@ -227,19 +239,19 @@ export class Cache<S = string> {

     const etag = this.get(etagKey);

-    const onMiss = async (resp: Response) => {
+    const onMiss = async (resp: UndiciResponseData) => {
       const serializer = 'serializer' in opt ? opt.serializer : identity as any;

       const value = await fn(resp);

-      if (resp.headers.has('ETag')) {
-        let serverETag = resp.headers.get('ETag')!;
+      let serverETag = ensureETag(resp.headers);
+      if (serverETag) {
         // FUCK someonewhocares.org
         if (url.includes('someonewhocares.org')) {
           serverETag = serverETag.replace('-gzip', '');
         }

-        console.log(picocolors.yellow('[cache] miss'), url, { status: resp.status, cachedETag: etag, serverETag });
+        console.log(picocolors.yellow('[cache] miss'), url, { status: resp.statusCode, cachedETag: etag, serverETag });

         this.set(etagKey, serverETag, TTL.ONE_WEEK_STATIC);
         this.set(cachedKey, serializer(value), TTL.ONE_WEEK_STATIC);
@@ -257,28 +269,24 @@ export class Cache<S = string> {

     const cached = this.get(cachedKey);
     if (cached == null) {
-      return onMiss(await fetchWithLog(url, requestInit));
+      return onMiss(await requestWithLog(url));
     }

-    const resp = await fetchWithLog(
+    const resp = await requestWithLog(
       url,
       {
-        ...(requestInit ?? defaultRequestInit),
         headers: (typeof etag === 'string' && etag.length > 0)
-          ? mergeHeaders(
-            (requestInit ?? defaultRequestInit).headers,
-            { 'If-None-Match': etag }
-          )
-          : (requestInit ?? defaultRequestInit).headers
+          ? { 'If-None-Match': etag }
+          : {}
       }
     );

     // Only miss if previously a ETag was present and the server responded with a 304
-    if (resp.headers.has('ETag') && resp.status !== 304) {
+    if (!ensureETag(resp.headers) && resp.statusCode !== 304) {
       return onMiss(resp);
     }

-    console.log(picocolors.green(`[cache] ${resp.status === 304 ? 'http 304' : 'cache hit'}`), picocolors.gray(url));
+    console.log(picocolors.green(`[cache] ${resp.statusCode === 304 ? 'http 304' : 'cache hit'}`), picocolors.gray(url));
     this.updateTtl(cachedKey, TTL.ONE_WEEK_STATIC);

     const deserializer = 'deserializer' in opt ? opt.deserializer : identity as any;
@@ -297,7 +305,7 @@ export class Cache<S = string> {
     }

     if (mirrorUrls.length === 0) {
-      return this.applyWithHttp304(primaryUrl, extraCacheKey, async (resp) => fn(await resp.text()), opt);
+      return this.applyWithHttp304(primaryUrl, extraCacheKey, async (resp) => fn(await resp.body.text()), opt);
     }

     const baseKey = primaryUrl + '$' + extraCacheKey;
@@ -323,7 +331,7 @@ export class Cache<S = string> {
       }

       const etag = this.get(getETagKey(url));
-      const res = await fetchWithLog(
+      const res = await requestWithLog(
         url,
         {
           signal: controller.signal,
@@ -337,26 +345,26 @@ export class Cache<S = string> {
         }
       );

-      const responseHasETag = res.headers.has('etag');
-      if (responseHasETag) {
-        this.set(getETagKey(url), res.headers.get('etag')!, TTL.ONE_WEEK_STATIC);
+      const serverETag = ensureETag(res.headers);
+      if (serverETag) {
+        this.set(getETagKey(url), serverETag, TTL.ONE_WEEK_STATIC);
       }
       // If we do not have a cached value, we ignore 304
-      if (res.status === 304 && typeof previouslyCached === 'string') {
+      if (res.statusCode === 304 && typeof previouslyCached === 'string') {
        controller.abort();
        throw new Custom304NotModifiedError(url, previouslyCached);
       }
-      if (!responseHasETag && !this.get(getETagKey(primaryUrl)) && typeof previouslyCached === 'string') {
+      if (!serverETag && !this.get(getETagKey(primaryUrl)) && typeof previouslyCached === 'string') {
        controller.abort();
        throw new CustomNoETagFallbackError(previouslyCached);
       }

       // either no etag and not cached
       // or has etag but not 304
-      const text = await res.text();
+      const text = await res.body.text();

       if (text.length < 2) {
-        throw new ResponseError(res);
+        throw new UndiciResponseError(res, url);
       }

       controller.abort();
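
The hunks above also motivate the new ensureETag helper: undici.request exposes response headers as a plain IncomingHttpHeaders record rather than a Headers instance, so lookups become property access on (usually lower-cased) keys instead of headers.has()/headers.get(). A minimal sketch of the ETag/If-None-Match round trip the cache performs, with a throwaway in-memory store standing in for the SQLite-backed Cache class (the store and function names here are hypothetical, not from the repository):

import { request } from 'undici';

// Hypothetical in-memory stand-in for the persistent cache
const etags = new Map<string, string>();
const bodies = new Map<string, string>();

async function cachedFetch(url: string): Promise<string> {
  const cachedETag = etags.get(url);

  const res = await request(url, {
    headers: cachedETag ? { 'If-None-Match': cachedETag } : {}
  });

  // Revalidated: the 304 body is empty, so reuse the cached payload
  if (res.statusCode === 304 && bodies.has(url)) {
    await res.body.text(); // drain the (empty) body so the connection can be reused
    return bodies.get(url)!;
  }

  const text = await res.body.text();
  const serverETag = res.headers.etag;
  if (typeof serverETag === 'string' && serverETag.length > 0) {
    etags.set(url, serverETag);
    bodies.set(url, text);
  }
  return text;
}
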
@@ -4,7 +4,7 @@ import { createMemoizedPromise } from './memo-promise';
 export const getPublicSuffixListTextPromise = createMemoizedPromise(() => fsFetchCache.applyWithHttp304<string[]>(
   'https://publicsuffix.org/list/public_suffix_list.dat',
   getFileContentHash(__filename),
-  (r) => r.text().then(text => text.split('\n')),
+  (r) => r.body.text().then(text => text.split('\n')),
   {
     // https://github.com/publicsuffix/list/blob/master/.github/workflows/tld-update.yml
     // Though the action runs every 24 hours, the IANA list is updated every 7 days.
@@ -1,5 +1,5 @@
 import picocolors from 'picocolors';
-import { defaultRequestInit, fetchWithLog, ResponseError } from './fetch-retry';
+import { defaultRequestInit, requestWithLog, UndiciResponseError } from './fetch-retry';
 import { setTimeout } from 'node:timers/promises';

 // eslint-disable-next-line sukka/unicorn/custom-error-definition -- typescript is better
@@ -59,11 +59,11 @@ export async function fetchAssetsWith304(url: string, fallbackUrls: string[] | r
       console.log(picocolors.gray('[fetch cancelled]'), picocolors.gray(url));
       throw new CustomAbortError();
     }
-    const res = await fetchWithLog(url, { signal: controller.signal, ...defaultRequestInit });
-    const text = await res.text();
+    const res = await requestWithLog(url, { signal: controller.signal, ...defaultRequestInit });
+    const text = await res.body.text();

     if (text.length < 2) {
-      throw new ResponseError(res);
+      throw new UndiciResponseError(res, url);
     }

     controller.abort();
@@ -1,12 +1,15 @@
 import picocolors from 'picocolors';
-import {
-  fetch,
+import undici, {
   interceptors,
   EnvHttpProxyAgent,
   setGlobalDispatcher
 } from 'undici';

-import type { Response, RequestInit, RequestInfo } from 'undici';
+import type {
+  Dispatcher
+} from 'undici';
+
+export type UndiciResponseData<T = any> = Dispatcher.ResponseData<T>;

 import CacheableLookup from 'cacheable-lookup';
 import type { LookupOptions as CacheableLookupOptions } from 'cacheable-lookup';
@@ -111,25 +114,22 @@ function calculateRetryAfterHeader(retryAfter: string) {
   return new Date(retryAfter).getTime() - current;
 }

-export class ResponseError extends Error {
-  readonly res: Response;
+export class UndiciResponseError extends Error {
   readonly code: number;
   readonly statusCode: number;
-  readonly url: string;

-  constructor(res: Response) {
-    super(res.statusText);
+  constructor(public readonly res: UndiciResponseData, public readonly url: string) {
+    super('HTTP ' + res.statusCode);

     if ('captureStackTrace' in Error) {
-      Error.captureStackTrace(this, ResponseError);
+      Error.captureStackTrace(this, UndiciResponseError);
     }

     // eslint-disable-next-line sukka/unicorn/custom-error-definition -- deliberatly use previous name
     this.name = this.constructor.name;
-    this.res = res;
-    this.code = res.status;
-    this.statusCode = res.status;
-    this.url = res.url;
+    this.code = res.statusCode;
+    this.statusCode = res.statusCode;
   }
 }

@@ -139,17 +139,15 @@ export const defaultRequestInit = {
   }
 };

-export async function fetchWithLog(url: RequestInfo, opts: RequestInit = defaultRequestInit) {
+export async function requestWithLog(url: string, opt?: Parameters<typeof undici.request>[1]) {
   try {
     // this will be retried
-    const res = (await fetch(url, opts));
-
-    if (res.status >= 400) {
-      throw new ResponseError(res);
+    const res = await undici.request(url, opt);
+    if (res.statusCode >= 400) {
+      throw new UndiciResponseError(res, url);
     }
-
-    if (!res.ok && res.status !== 304) {
-      throw new ResponseError(res);
+    if (!(res.statusCode >= 200 && res.statusCode <= 299) && res.statusCode !== 304) {
+      throw new UndiciResponseError(res, url);
     }

     return res;
@@ -167,4 +165,4 @@ export async function fetchWithLog(url: RequestInfo, opts: RequestInit = default

     throw err;
   }
-};
+}
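
From a caller's perspective the new helper is used much like the old fetchWithLog, except that it resolves to undici's Dispatcher.ResponseData and failures surface as UndiciResponseError, which now carries the request URL explicitly. A hedged usage sketch (assuming defaultRequestInit remains compatible with undici's request options, as the hunks above suggest; the wrapper function here is illustrative):

import { defaultRequestInit, requestWithLog, UndiciResponseError } from './fetch-retry';

async function tryFetchText(url: string): Promise<string | null> {
  try {
    const res = await requestWithLog(url, defaultRequestInit);
    return await res.body.text();
  } catch (err) {
    if (err instanceof UndiciResponseError) {
      // the error carries both statusCode and url
      console.warn('[request failed]', err.url, err.statusCode);
      return null;
    }
    throw err;
  }
}
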
@@ -8,6 +8,7 @@ import { TextDecoderStream } from 'node:stream/web';
 import { processLine } from './process-line';
 import { $fetch } from './make-fetch-happen';
 import type { NodeFetchResponse } from './make-fetch-happen';
+import type { UndiciResponseData } from './fetch-retry';

 function getReadableStream(file: string | FileHandle): ReadableStream {
   if (typeof file === 'string') {
@@ -21,22 +22,26 @@ export const readFileByLine: ((file: string | FileHandle) => AsyncIterable<strin
   .pipeThrough(new TextDecoderStream())
   .pipeThrough(new TextLineStream());

-function ensureResponseBody<T extends Response | NodeFetchResponse>(resp: T): NonNullable<T['body']> {
+function ensureResponseBody<T extends Response | NodeFetchResponse | UndiciResponseData>(resp: T): NonNullable<T['body']> {
   if (!resp.body) {
     throw new Error('Failed to fetch remote text');
   }
-  if (resp.bodyUsed) {
+  if ('bodyUsed' in resp && resp.bodyUsed) {
     throw new Error('Body has already been consumed.');
   }
   return resp.body;
 }

-export const createReadlineInterfaceFromResponse: ((resp: Response | NodeFetchResponse) => AsyncIterable<string>) = (resp) => {
+export const createReadlineInterfaceFromResponse: ((resp: Response | NodeFetchResponse | UndiciResponseData) => AsyncIterable<string>) = (resp) => {
   const stream = ensureResponseBody(resp);

   const webStream: ReadableStream<Uint8Array> = 'getReader' in stream
     ? stream
-    : Readable.toWeb(new Readable().wrap(stream)) as any;
+    : (
+      'body' in stream
+        ? stream.body
+        : Readable.toWeb(new Readable().wrap(stream)) as any
+    );

   return webStream
     .pipeThrough(new TextDecoderStream())
@@ -2,6 +2,7 @@ import { createReadlineInterfaceFromResponse } from './fetch-text-by-line';
 import { parse as tldtsParse } from 'tldts';
 import { $fetch } from './make-fetch-happen';
 import type { NodeFetchResponse } from './make-fetch-happen';
+import type { UndiciResponseData } from './fetch-retry';

 function isDomainLoose(domain: string): boolean {
   const { isIcann, isPrivate, isIp } = tldtsParse(domain);
@@ -15,7 +16,7 @@ export function extractDomainsFromFelixDnsmasq(line: string): string | null {
   return null;
 }

-export async function parseFelixDnsmasqFromResp(resp: Response | NodeFetchResponse): Promise<string[]> {
+export async function parseFelixDnsmasqFromResp(resp: NodeFetchResponse | UndiciResponseData): Promise<string[]> {
   const results: string[] = [];

   for await (const line of createReadlineInterfaceFromResponse(resp)) {