Feat: introduce make-fetch-happen (#44)

Repository: mirror of https://github.com/SukkaW/Surge.git
commit c75f7fcc76
parent bb07225f6c
@@ -3,10 +3,9 @@ import path from 'node:path';
 import { Sema } from 'async-sema';
 import { getHostname } from 'tldts-experimental';
 import { task } from './trace';
-import { fetchWithRetry } from './lib/fetch-retry';
+import { $fetch } from './lib/make-fetch-happen';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { readFileIntoProcessedArray } from './lib/fetch-text-by-line';
-import { TTL, deserializeArray, fsFetchCache, serializeArray, createCacheKey } from './lib/cache-filesystem';
 
 import { DomainsetOutput } from './lib/create-file';
 import { OUTPUT_SURGE_DIR } from './constants/dir';
@@ -137,20 +136,10 @@ const PREDEFINE_DOMAINS = [
 ];
 
 const s = new Sema(2);
-const cacheKey = createCacheKey(__filename);
 
-const latestTopUserAgentsPromise = fsFetchCache.applyWithHttp304<string[]>(
-  'https://cdn.jsdelivr.net/npm/top-user-agents@latest/src/desktop.json',
-  cacheKey('https://cdn.jsdelivr.net/npm/top-user-agents@latest/src/desktop.json'),
-  async (res) => {
-    const userAgents = await (res.json() as Promise<string[]>);
-    return userAgents.filter(ua => ua.startsWith('Mozilla/5.0 '));
-  },
-  {
-    serializer: serializeArray,
-    deserializer: deserializeArray
-  }
-);
+const latestTopUserAgentsPromise = $fetch('https://cdn.jsdelivr.net/npm/top-user-agents@latest/src/desktop.json')
+  .then(res => res.json())
+  .then((userAgents: string[]) => userAgents.filter(ua => ua.startsWith('Mozilla/5.0 ')));
 
 const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>> => {
   const topUserAgents = await latestTopUserAgentsPromise;
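Note: the hand-rolled `fsFetchCache.applyWithHttp304` wrapper can be dropped here because make-fetch-happen implements RFC 7234 HTTP caching on disk, including `ETag`/`Last-Modified` revalidation (304s), which is what the wrapper emulated by hand. A minimal sketch of how the replacement behaves, assuming make-fetch-happen's documented `x-local-cache-status` response header (the exact values shown in the comments are indicative, not guaranteed):

```ts
import { $fetch } from './lib/make-fetch-happen';

// Hypothetical demo, not part of this commit: fetch the same URL twice.
// The second call should be served from (or revalidated against) the
// on-disk cache, with no fsFetchCache bookkeeping in user code.
async function demo() {
  const url = 'https://cdn.jsdelivr.net/npm/top-user-agents@latest/src/desktop.json';

  const first = await $fetch(url);
  console.log(first.headers.get('x-local-cache-status')); // e.g. 'miss'

  const second = await $fetch(url);
  console.log(second.headers.get('x-local-cache-status')); // e.g. 'hit' or 'updated'

  const userAgents = (await second.json()) as string[];
  return userAgents.filter(ua => ua.startsWith('Mozilla/5.0 '));
}
```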
@@ -160,45 +149,34 @@ const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>>
   try {
     const randomUserAgent = topUserAgents[Math.floor(Math.random() * topUserAgents.length)];
 
-    return await fsFetchCache.apply(
-      cacheKey(url),
-      () => s.acquire().then(() => fetchWithRetry(url, {
-        headers: {
-          dnt: '1',
-          Referer: 'https://www.speedtest.net/',
-          accept: 'application/json, text/plain, */*',
-          'User-Agent': randomUserAgent,
-          'Accept-Language': 'en-US,en;q=0.9',
-          ...(randomUserAgent.includes('Chrome')
-            ? {
-              'Sec-Ch-Ua-Mobile': '?0',
-              'Sec-Fetch-Dest': 'empty',
-              'Sec-Fetch-Mode': 'cors',
-              'Sec-Fetch-Site': 'same-origin',
-              'Sec-Gpc': '1'
-            }
-            : {})
-        },
-        signal: AbortSignal.timeout(1000 * 60),
-        retry: {
-          retries: 2
-        }
-      })).then(r => r.json() as any).then((data: Array<{ url: string, host: string }>) => data.reduce<string[]>(
-        (prev, cur) => {
-          const line = cur.host || cur.url;
-          const hn = getHostname(line, { detectIp: false, validateHostname: true });
-          if (hn) {
-            prev.push(hn);
-          }
-          return prev;
-        }, []
-      )).finally(() => s.release()),
-      {
-        ttl: TTL.ONE_WEEK(),
-        serializer: serializeArray,
-        deserializer: deserializeArray
-      }
-    );
+    return await s.acquire().then(() => $fetch(url, {
+      headers: {
+        dnt: '1',
+        Referer: 'https://www.speedtest.net/',
+        accept: 'application/json, text/plain, */*',
+        'User-Agent': randomUserAgent,
+        'Accept-Language': 'en-US,en;q=0.9',
+        ...(randomUserAgent.includes('Chrome')
+          ? {
+            'Sec-Ch-Ua-Mobile': '?0',
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-origin',
+            'Sec-Gpc': '1'
+          }
+          : {})
+      },
+      timeout: 1000 * 60
+    })).then(r => r.json() as any).then((data: Array<{ url: string, host: string }>) => data.reduce<string[]>(
+      (prev, cur) => {
+        const line = cur.host || cur.url;
+        const hn = getHostname(line, { detectIp: false, validateHostname: true });
+        if (hn) {
+          prev.push(hn);
+        }
+        return prev;
+      }, []
+    )).finally(() => s.release());
   } catch (e) {
     console.error(e);
     return [];
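The acquire/finally pattern above is worth calling out: the `Sema(2)` semaphore caps Speedtest API concurrency at two in-flight requests, and `.finally(() => s.release())` guarantees the slot is returned even when the fetch or the JSON parsing rejects. A stripped-down sketch of the same pattern (the helper name is illustrative, not from the commit):

```ts
import { Sema } from 'async-sema';

const s = new Sema(2); // at most two tasks run concurrently

// Hypothetical helper for illustration: run any async task under the semaphore.
async function withSlot<T>(task: () => Promise<T>): Promise<T> {
  await s.acquire();
  try {
    return await task();
  } finally {
    s.release(); // always release, even if task() throws
  }
}
```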
@@ -1,5 +1,4 @@
 // @ts-check
-import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
 import { createReadlineInterfaceFromResponse } from './lib/fetch-text-by-line';
 import { isProbablyIpv4, isProbablyIpv6 } from './lib/is-fast-ip';
 import { processLine } from './lib/process-line';
@@ -7,9 +6,10 @@ import { task } from './trace';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { createMemoizedPromise } from './lib/memo-promise';
 import { RulesetOutput } from './lib/create-file';
+import { $fetch } from './lib/make-fetch-happen';
 
 export const getTelegramCIDRPromise = createMemoizedPromise(async () => {
-  const resp = await fetchWithRetry('https://core.telegram.org/resources/cidr.txt', defaultRequestInit);
+  const resp = await $fetch('https://core.telegram.org/resources/cidr.txt');
   const lastModified = resp.headers.get('last-modified');
   const date = lastModified ? new Date(lastModified) : new Date();
 
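The `createMemoizedPromise` wrapper above means the Telegram CIDR list is fetched at most once per process, no matter how many build tasks await it. A minimal sketch of that memoization pattern (an illustration only, not the repo's actual `./lib/memo-promise` implementation):

```ts
// Hypothetical re-implementation for illustration: cache the promise itself,
// so concurrent callers share one in-flight fetch instead of racing.
function createMemoizedPromise<T>(fn: () => Promise<T>): () => Promise<T> {
  let cached: Promise<T> | null = null;
  return () => {
    cached ??= fn(); // first caller kicks off the work; later callers reuse it
    return cached;
  };
}
```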
@@ -1,11 +1,10 @@
 import { task } from './trace';
 import path from 'node:path';
 import fs from 'node:fs';
-import { Readable } from 'node:stream';
 import { pipeline } from 'node:stream/promises';
-import { fetchWithRetry } from './lib/fetch-retry';
 import { OUTPUT_MOCK_DIR } from './constants/dir';
 import { mkdirp } from './lib/misc';
+import { $fetch } from './lib/make-fetch-happen';
 
 const ASSETS_LIST = {
   'www-google-analytics-com_ga.js': 'https://raw.githubusercontent.com/AdguardTeam/Scriptlets/master/dist/redirect-files/google-analytics-ga.js',
@@ -18,7 +17,7 @@ const ASSETS_LIST = {
 export const downloadMockAssets = task(require.main === module, __filename)((span) => Promise.all(Object.entries(ASSETS_LIST).map(
   ([filename, url]) => span
     .traceChildAsync(url, async () => {
-      const res = await fetchWithRetry(url);
+      const res = await $fetch(url);
       if (!res.body) {
         throw new Error(`Empty body from ${url}`);
       }
@@ -27,7 +26,7 @@ export const downloadMockAssets = task(require.main === module, __filename)((spa
       const src = path.join(OUTPUT_MOCK_DIR, filename);
 
       return pipeline(
-        Readable.fromWeb(res.body),
+        res.body,
         fs.createWriteStream(src, 'utf-8')
       );
     })
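The `Readable.fromWeb(res.body)` → `res.body` change follows directly from the fetch client swap: the global (undici) `fetch` returns a WHATWG `ReadableStream` body that must be adapted before entering `stream.pipeline`, whereas make-fetch-happen's response body is modeled on node-fetch and is already a Node.js readable stream. A sketch contrasting the two shapes, under that assumption (function names are illustrative; the cast may or may not be needed depending on your TS lib settings):

```ts
import fs from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import type { ReadableStream as NodeWebReadableStream } from 'node:stream/web';
import { $fetch } from './lib/make-fetch-happen';

// With the global (undici) fetch: body is a web ReadableStream<Uint8Array>
// and must be converted for a Node stream pipeline.
async function saveWithWebFetch(url: string, dest: string) {
  const res = await fetch(url);
  if (!res.body) throw new Error(`Empty body from ${url}`);
  await pipeline(
    Readable.fromWeb(res.body as unknown as NodeWebReadableStream<Uint8Array>),
    fs.createWriteStream(dest)
  );
}

// With make-fetch-happen (node-fetch-style Response): body is already a
// Node.js readable stream and pipes directly.
async function saveWithMakeFetchHappen(url: string, dest: string) {
  const res = await $fetch(url);
  if (!res.body) throw new Error(`Empty body from ${url}`);
  await pipeline(res.body, fs.createWriteStream(dest));
}
```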
@@ -1,13 +1,12 @@
 import path from 'node:path';
 import fs from 'node:fs';
 import { pipeline } from 'node:stream/promises';
+import picocolors from 'picocolors';
 import { task } from './trace';
-import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
 import { extract as tarExtract } from 'tar-fs';
 import type { Headers as TarEntryHeaders } from 'tar-fs';
 import zlib from 'node:zlib';
-import { Readable } from 'node:stream';
-import picocolors from 'picocolors';
+import { $fetch } from './lib/make-fetch-happen';
 
 const GITHUB_CODELOAD_URL = 'https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master';
 const GITLAB_CODELOAD_URL = 'https://gitlab.com/SukkaW/ruleset.skk.moe/-/archive/master/ruleset.skk.moe-master.tar.gz';
@@ -21,13 +20,7 @@ export const downloadPreviousBuild = task(require.main === module, __filename)(a
   }
 
   const tarGzUrl = await span.traceChildAsync('get tar.gz url', async () => {
-    const resp = await fetchWithRetry(GITHUB_CODELOAD_URL, {
-      ...defaultRequestInit,
-      method: 'HEAD',
-      retry: {
-        retryOnNon2xx: false
-      }
-    });
+    const resp = await $fetch(GITHUB_CODELOAD_URL, { method: 'HEAD' });
     if (resp.status !== 200) {
       console.warn('Download previous build from GitHub failed! Status:', resp.status);
       console.warn('Switch to GitLab');
@@ -37,19 +30,12 @@ export const downloadPreviousBuild = task(require.main === module, __filename)(a
   });
 
   return span.traceChildAsync('download & extract previoud build', async () => {
-    const resp = await fetchWithRetry(tarGzUrl, {
+    const resp = await $fetch(tarGzUrl, {
       headers: {
         'User-Agent': 'curl/8.9.1',
         // https://github.com/unjs/giget/issues/97
         // https://gitlab.com/gitlab-org/gitlab/-/commit/50c11f278d18fe1f3fb12eb595067216bb58ade2
         'sec-fetch-mode': 'same-origin'
       },
-      // https://github.com/unjs/giget/issues/97
-      // https://gitlab.com/gitlab-org/gitlab/-/commit/50c11f278d18fe1f3fb12eb595067216bb58ade2
-
-      mode: 'same-origin',
-      retry: {
-        retryOnNon2xx: false
-      }
     });
 
@@ -79,7 +65,7 @@ export const downloadPreviousBuild = task(require.main === module, __filename)(a
   );
 
   return pipeline(
-    Readable.fromWeb(resp.body),
+    resp.body,
     gunzip,
     extract
   );
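One behavioural subtlety in this file: the old code passed `mode: 'same-origin'` as a WHATWG fetch `RequestInit` option, but make-fetch-happen is not a browser fetch, so the commit sends the literal `sec-fetch-mode: same-origin` request header instead; per the giget issue linked in the comments, GitLab's archive endpoint keys on that header. A sketch of the workaround, assuming that server behaviour (the function name is illustrative):

```ts
import { $fetch } from './lib/make-fetch-happen';

// Illustrative: GitLab may reject archive downloads that don't look
// browser-initiated, so Sec-Fetch-Mode is set as a plain header here
// rather than via the (browser-only) RequestInit `mode` option.
async function fetchTarball(url: string) {
  return $fetch(url, {
    headers: {
      'User-Agent': 'curl/8.9.1',
      'sec-fetch-mode': 'same-origin'
    }
  });
}
```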
@@ -1,14 +1,13 @@
 import path from 'node:path';
 import fs from 'node:fs';
 import fsp from 'node:fs/promises';
-import { Readable } from 'node:stream';
 import { pipeline } from 'node:stream/promises';
 import zlib from 'node:zlib';
 import process from 'node:process';
 
 import { async as ezspawn } from '@jsdevtools/ez-spawn';
 import { mkdirp } from './misc';
-import { fetchWithRetry } from './fetch-retry';
+import { $fetch } from './make-fetch-happen';
 
 const mihomoBinaryDir = path.join(__dirname, '../../node_modules/.cache/mihomo');
 const mihomoBinaryPath = path.join(mihomoBinaryDir, 'mihomo');
@@ -33,7 +32,7 @@ const ensureMihomoBinary = async () => {
     throw new Error(`Unsupported platform: ${process.platform} ${process.arch}`);
   }
 
-  const res = await fetchWithRetry(downloadUrl);
+  const res = await $fetch(downloadUrl);
 
   if (!res.ok || !res.body) {
     throw new Error(`Failed to download mihomo binary: ${res.statusText}`);
@@ -42,7 +41,7 @@ const ensureMihomoBinary = async () => {
   const gunzip = zlib.createGunzip();
 
   await pipeline(
-    Readable.fromWeb(res.body),
+    res.body,
     gunzip,
     writeStream
   );
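Same body-shape change as in the mock-assets task; the rest of the pipeline is untouched: the gzipped binary is decompressed on the fly and streamed straight to disk. A self-contained sketch of that gunzip-to-file pattern (URL and destination are illustrative):

```ts
import fs from 'node:fs';
import zlib from 'node:zlib';
import { pipeline } from 'node:stream/promises';
import { $fetch } from './make-fetch-happen';

// Illustrative: stream a .gz download through gunzip into a file without
// ever buffering the whole artifact in memory.
async function downloadGzipped(url: string, dest: string) {
  const res = await $fetch(url);
  if (!res.ok || !res.body) throw new Error(`Failed to download: ${res.statusText}`);
  await pipeline(res.body, zlib.createGunzip(), fs.createWriteStream(dest));
}
```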
@@ -1,12 +1,13 @@
 import fs from 'node:fs';
-import { fetchWithRetry, defaultRequestInit } from './fetch-retry';
+import { Readable } from 'node:stream';
 import type { FileHandle } from 'node:fs/promises';
 
 import { TextLineStream } from './text-line-transform-stream';
 import type { ReadableStream } from 'node:stream/web';
 import { TextDecoderStream } from 'node:stream/web';
 import { processLine } from './process-line';
+import { $fetch } from './make-fetch-happen';
+import type { NodeFetchResponse } from './make-fetch-happen';
 
 const getReadableStream = (file: string | FileHandle): ReadableStream => {
   if (typeof file === 'string') {
@@ -20,7 +21,7 @@ export const readFileByLine: ((file: string | FileHandle) => AsyncIterable<strin
     .pipeThrough(new TextDecoderStream())
     .pipeThrough(new TextLineStream());
 
-const ensureResponseBody = (resp: Response) => {
+const ensureResponseBody = <T extends Response | NodeFetchResponse>(resp: T): NonNullable<T['body']> => {
   if (!resp.body) {
     throw new Error('Failed to fetch remote text');
   }
@@ -30,12 +31,20 @@ const ensureResponseBody = (resp: Response) => {
   return resp.body;
 };
 
-export const createReadlineInterfaceFromResponse: ((resp: Response) => AsyncIterable<string>) = (resp) => ensureResponseBody(resp)
-  .pipeThrough(new TextDecoderStream())
-  .pipeThrough(new TextLineStream());
+export const createReadlineInterfaceFromResponse: ((resp: Response | NodeFetchResponse) => AsyncIterable<string>) = (resp) => {
+  const stream = ensureResponseBody(resp);
+
+  const webStream: ReadableStream<Uint8Array> = 'getReader' in stream
+    ? stream
+    : Readable.toWeb(new Readable().wrap(stream)) as any;
+
+  return webStream
+    .pipeThrough(new TextDecoderStream())
+    .pipeThrough(new TextLineStream());
+};
 
-export function fetchRemoteTextByLine(url: string | URL) {
-  return fetchWithRetry(url, defaultRequestInit).then(createReadlineInterfaceFromResponse);
+export function fetchRemoteTextByLine(url: string) {
+  return $fetch(url).then(createReadlineInterfaceFromResponse);
 }
 
 export async function readFileIntoProcessedArray(file: string | FileHandle) {
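Since `createReadlineInterfaceFromResponse` now accepts both response flavours, it has to bridge two body shapes: it duck-types on `getReader` (present only on web `ReadableStream`) and wraps Node streams via `Readable.toWeb(new Readable().wrap(stream))`. A self-contained sketch of that bridging, assuming the same duck-type check (the helper name is illustrative):

```ts
import { Readable } from 'node:stream';
import type { ReadableStream } from 'node:stream/web';

// Normalize either a web ReadableStream or a Node.js readable stream into a
// web stream. 'getReader' exists only on web streams; .wrap() adapts
// old-style Node streams onto a fresh Readable.
function toWebStream(
  stream: ReadableStream<Uint8Array> | NodeJS.ReadableStream
): ReadableStream<Uint8Array> {
  return 'getReader' in stream
    ? stream
    : Readable.toWeb(new Readable().wrap(stream)) as unknown as ReadableStream<Uint8Array>;
}
```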
Build/lib/make-fetch-happen.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+import path from 'node:path';
+import fs from 'node:fs';
+import makeFetchHappen from 'make-fetch-happen';
+// eslint-disable-next-line @typescript-eslint/no-restricted-imports -- type only
+export type { Response as NodeFetchResponse } from 'node-fetch';
+
+const cachePath = path.resolve(__dirname, '../../.cache/__make_fetch_happen__');
+fs.mkdirSync(cachePath, { recursive: true });
+
+export const $fetch = makeFetchHappen.defaults({
+  cachePath,
+  maxSockets: 32, /**
+   * They said 15 is a good default that prevents knocking out others' routers,
+   * I disagree. 32 is a good number.
+   */
+  headers: {
+    'User-Agent': 'curl/8.9.1 (https://github.com/SukkaW/Surge)'
+  },
+  retry: {
+    retries: 5,
+    randomize: true
+  }
+});
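For context, `makeFetchHappen.defaults()` returns a fetch-like function with these options baked in, so every call site inherits the on-disk cache, the retry policy, and the User-Agent, while per-call options can still be supplied. A usage sketch (the URL is illustrative):

```ts
import { $fetch } from './lib/make-fetch-happen';

async function example() {
  // Inherits cachePath, retry and User-Agent from the defaults above;
  // per-call options such as method or extra headers can still be passed.
  const res = await $fetch('https://example.com/list.txt', { method: 'GET' });
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
  return res.text();
}
```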
@@ -1,6 +1,7 @@
 import { createReadlineInterfaceFromResponse } from './fetch-text-by-line';
 import { parse as tldtsParse } from 'tldts';
-import { fetchWithRetry, defaultRequestInit } from './fetch-retry';
+import { $fetch } from './make-fetch-happen';
+import type { NodeFetchResponse } from './make-fetch-happen';
 
 const isDomainLoose = (domain: string): boolean => {
   const { isIcann, isPrivate, isIp } = tldtsParse(domain);
@@ -14,7 +15,7 @@ export const extractDomainsFromFelixDnsmasq = (line: string): string | null => {
   return null;
 };
 
-export const parseFelixDnsmasqFromResp = async (resp: Response): Promise<string[]> => {
+export const parseFelixDnsmasqFromResp = async (resp: Response | NodeFetchResponse): Promise<string[]> => {
   const results: string[] = [];
 
   for await (const line of createReadlineInterfaceFromResponse(resp)) {
@@ -27,7 +28,7 @@ export const parseFelixDnsmasqFromResp = async (resp: Response): Promise<string[
   return results;
 };
 
-export const parseFelixDnsmasq = async (url: string | URL): Promise<string[]> => {
-  const resp = await fetchWithRetry(url, defaultRequestInit);
+export const parseFelixDnsmasq = async (url: string): Promise<string[]> => {
+  const resp = await $fetch(url);
   return parseFelixDnsmasqFromResp(resp);
 };
@@ -5,14 +5,14 @@ import path from 'node:path';
 import { processLine } from './lib/process-line';
 import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
 import { SOURCE_DIR } from './constants/dir';
-import { fetchWithRetry } from './lib/fetch-retry';
+import { $fetch } from './lib/make-fetch-happen';
 
 export const parseDomesticList = async () => {
   const trie = createTrie(await parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf'));
 
   const top5000 = new Set<string>();
 
-  const res = await (await fetchWithRetry('https://radar.cloudflare.com/charts/LargerTopDomainsTable/attachment?id=1077&top=10000', {
+  const res = await (await $fetch('https://radar.cloudflare.com/charts/LargerTopDomainsTable/attachment?id=1077&top=10000', {
     headers: {
       accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
       'accept-language': 'en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,es;q=0.5',
@@ -6,13 +6,13 @@ import { parse } from 'csv-parse/sync';
 import { readFileByLine } from './lib/fetch-text-by-line';
 import path from 'node:path';
 import { SOURCE_DIR } from './constants/dir';
-import { fetchWithRetry } from './lib/fetch-retry';
+import { $fetch } from './lib/make-fetch-happen';
 
 export const parseGfwList = async () => {
   const whiteSet = new Set<string>();
   const blackSet = new Set<string>();
 
-  const text = await (await fetchWithRetry('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
+  const text = await (await $fetch('https://raw.githubusercontent.com/gfwlist/gfwlist/master/gfwlist.txt')).text();
   for (const l of atob(text).split('\n')) {
     const line = processLine(l);
     if (!line) continue;
@@ -55,13 +55,13 @@ export const parseGfwList = async () => {
       continue;
     }
   }
-  for (const l of (await (await fetchWithRetry('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
+  for (const l of (await (await $fetch('https://raw.githubusercontent.com/Loyalsoldier/cn-blocked-domain/release/domains.txt')).text()).split('\n')) {
     blackSet.add(l);
   }
 
   const top500Gfwed = new Set<string>();
 
-  const res = await (await fetchWithRetry('https://radar.cloudflare.com/charts/LargerTopDomainsTable/attachment?id=1077&top=10000', {
+  const res = await (await $fetch('https://radar.cloudflare.com/charts/LargerTopDomainsTable/attachment?id=1077&top=10000', {
     headers: {
       accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
       'accept-language': 'en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,es;q=0.5',
@@ -32,6 +32,7 @@
     "fdir": "^6.4.0",
     "foxact": "^0.2.38",
     "json-stringify-pretty-compact": "^3.0.0",
+    "make-fetch-happen": "^14.0.1",
     "mnemonist": "^0.39.8",
     "picocolors": "^1.1.0",
     "punycode": "^2.3.1",
@@ -48,7 +49,9 @@
     "@types/async-retry": "^1.4.9",
     "@types/better-sqlite3": "^7.6.11",
     "@types/chai": "^4.3.20",
+    "@types/make-fetch-happen": "^10.0.4",
     "@types/mocha": "^10.0.9",
+    "@types/node-fetch": "2",
     "@types/punycode": "^2.1.4",
     "@types/tar-fs": "^2.0.4",
     "@types/tar-stream": "^3.1.3",
pnpm-lock.yaml (generated, 559 lines changed)
File diff suppressed because it is too large