Mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-12 01:00:34 +08:00)
Chore: use more make-fetch-happen
This commit is contained in: commit db7a4bc97a (parent ed85377503)
@@ -2,19 +2,10 @@ import { parseFelixDnsmasqFromResp } from './lib/parse-dnsmasq';
 import { task } from './trace';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { createMemoizedPromise } from './lib/memo-promise';
-import { deserializeArray, fsFetchCache, serializeArray, getFileContentHash } from './lib/cache-filesystem';
 import { DomainsetOutput } from './lib/create-file';
+import { $fetch } from './lib/make-fetch-happen';
 
-const url = 'https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf';
-export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsFetchCache.applyWithHttp304(
-  url,
-  getFileContentHash(__filename),
-  parseFelixDnsmasqFromResp,
-  {
-    serializer: serializeArray,
-    deserializer: deserializeArray
-  }
-));
+export const getAppleCdnDomainsPromise = createMemoizedPromise(() => $fetch('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf').then(parseFelixDnsmasqFromResp));
 
 export const buildAppleCdn = task(require.main === module, __filename)(async (span) => {
   const res: string[] = await span.traceChildPromise('get apple cdn domains', getAppleCdnDomainsPromise());
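Note on the pattern above: createMemoizedPromise caches the promise produced by its factory, so every consumer of getAppleCdnDomainsPromise() shares a single download per process. A minimal sketch of such a helper, assuming only the behavior visible here (the real implementation lives in ./lib/memo-promise and may differ):

```ts
// Hypothetical minimal equivalent of createMemoizedPromise:
// the first call invokes the factory, every later call reuses the same promise.
function createMemoizedPromise<T>(factory: () => Promise<T>): () => Promise<T> {
  let cached: Promise<T> | undefined;
  return () => {
    cached ??= factory(); // the fetch starts once, on first demand
    return cached;
  };
}
```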
@@ -1,9 +1,8 @@
 import path from 'node:path';
-import { readFileIntoProcessedArray } from './lib/fetch-text-by-line';
-import { createTrie } from './lib/trie';
+import { fetchRemoteTextByLine, readFileIntoProcessedArray } from './lib/fetch-text-by-line';
+import { HostnameTrie } from './lib/trie';
 import { task } from './trace';
 import { SHARED_DESCRIPTION } from './lib/constants';
-import { getPublicSuffixListTextPromise } from './lib/download-publicsuffixlist';
 import { appendArrayInPlace } from './lib/append-array-in-place';
 import { SOURCE_DIR } from './constants/dir';
 import { processLine } from './lib/process-line';
@@ -11,16 +10,14 @@ import { DomainsetOutput } from './lib/create-file';
 import { CRASHLYTICS_WHITELIST } from './constants/reject-data-source';
 
 const getS3OSSDomainsPromise = (async (): Promise<string[]> => {
-  const trie = createTrie((await getPublicSuffixListTextPromise()).reduce<string[]>(
-    (acc, cur) => {
-      const tmp = processLine(cur);
-      if (tmp) {
-        acc.push(tmp);
-      }
-      return acc;
-    },
-    []
-  ));
+  const trie = new HostnameTrie();
+
+  for await (const line of await fetchRemoteTextByLine('https://publicsuffix.org/list/public_suffix_list.dat')) {
+    const tmp = processLine(line);
+    if (tmp) {
+      trie.add(tmp);
+    }
+  }
 
 /**
  * Extract OSS domain from publicsuffix list
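The rewrite above switches from buffering the whole public suffix list into an array (a reduce over getPublicSuffixListTextPromise()) to streaming it line by line into a HostnameTrie. Only add() is exercised in this hunk; a toy stand-in under that assumption (the real ./lib/trie certainly does more, such as suffix lookups):

```ts
// Toy stand-in for HostnameTrie, assuming the only API used here is add().
// Hostnames are stored label by label from the TLD inward, the usual
// layout when the trie is later queried by domain suffix.
class ToyHostnameTrie {
  private readonly children = new Map<string, ToyHostnameTrie>();

  add(hostname: string): void {
    let node: ToyHostnameTrie = this;
    // 'cdn.example.com' is inserted as ['com', 'example', 'cdn']
    for (const label of hostname.split('.').reverse()) {
      let next = node.children.get(label);
      if (!next) {
        next = new ToyHostnameTrie();
        node.children.set(label, next);
      }
      node = next;
    }
  }
}
```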
@@ -68,10 +65,11 @@ export const buildCdnDownloadConf = task(require.main === module, __filename)(as
     readFileIntoProcessedArray(path.join(SOURCE_DIR, 'domainset/steam.conf'))
   ]);
 
+  // Move S3 domains to download domain set, since S3 files may be large
   appendArrayInPlace(downloadDomainSet, S3OSSDomains.map(domain => `.${domain}`));
   appendArrayInPlace(downloadDomainSet, steamDomainSet);
 
-  // we have whitelisted the crashlytics domain, but it doesn't mean we can't put it in CDN policy
+  // we have whitelisted the crashlytics domain, and we also want to put it in CDN policy
   appendArrayInPlace(cdnDomainsList, CRASHLYTICS_WHITELIST);
 
   return Promise.all([
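appendArrayInPlace shows up twice here; by its name it mutates the destination instead of allocating a new array the way concat or spread would. An assumed shape:

```ts
// Assumed shape of appendArrayInPlace: plain index-based pushes avoid
// both the extra allocation of [...a, ...b] and the argument-count
// limit that dest.push(...source) can hit on very large arrays.
function appendArrayInPlace<T>(dest: T[], source: T[]): T[] {
  for (let i = 0, len = source.length; i < len; i++) {
    dest.push(source[i]);
  }
  return dest;
}
```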
@@ -8,33 +8,25 @@ import { fsFetchCache, getFileContentHash } from './lib/cache-filesystem';
 import { processLine } from './lib/process-line';
 import { RulesetOutput } from './lib/create-file';
 import { SOURCE_DIR } from './constants/dir';
+import { $fetch } from './lib/make-fetch-happen';
 
 const BOGUS_NXDOMAIN_URL = 'https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf';
-
-const getBogusNxDomainIPsPromise = fsFetchCache.applyWithHttp304(
-  BOGUS_NXDOMAIN_URL,
-  getFileContentHash(__filename),
-  async (resp) => {
-    const ipv4: string[] = [];
-    const ipv6: string[] = [];
-
-    for await (const line of createReadlineInterfaceFromResponse(resp)) {
-      if (line.startsWith('bogus-nxdomain=')) {
-        const ip = line.slice(15).trim();
-        if (isProbablyIpv4(ip)) {
-          ipv4.push(ip);
-        } else if (isProbablyIpv6(ip)) {
-          ipv6.push(ip);
-        }
-      }
-    }
-    return [ipv4, ipv6] as const;
-  },
-  {
-    serializer: JSON.stringify,
-    deserializer: JSON.parse
-  }
-);
+const getBogusNxDomainIPsPromise: Promise<[ipv4: string[], ipv6: string[]]> = $fetch(BOGUS_NXDOMAIN_URL).then(async (resp) => {
+  const ipv4: string[] = [];
+  const ipv6: string[] = [];
+
+  for await (const line of createReadlineInterfaceFromResponse(resp)) {
+    if (line.startsWith('bogus-nxdomain=')) {
+      const ip = line.slice(15).trim();
+      if (isProbablyIpv4(ip)) {
+        ipv4.push(ip);
+      } else if (isProbablyIpv6(ip)) {
+        ipv6.push(ip);
+      }
+    }
+  }
+  return [ipv4, ipv6] as const;
+});
 
 const BOTNET_FILTER_URL = 'https://malware-filter.pages.dev/botnet-filter-dnscrypt-blocked-ips.txt';
 const BOTNET_FILTER_MIRROR_URL = [
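The magic number in line.slice(15) is 'bogus-nxdomain='.length. Pulled out as a standalone function for illustration, with the isProbablyIpv4/isProbablyIpv6 checks stubbed by naive heuristics (a hypothetical helper, not part of the commit):

```ts
const PREFIX = 'bogus-nxdomain='; // PREFIX.length === 15, hence slice(15)

// Hypothetical extraction of the per-line logic above.
function parseBogusNxdomainLine(line: string): { v4?: string, v6?: string } {
  if (!line.startsWith(PREFIX)) return {};
  const ip = line.slice(PREFIX.length).trim();
  if (ip.includes(':')) return { v6: ip }; // naive stand-in for isProbablyIpv6
  if (/^\d{1,3}(\.\d{1,3}){3}$/.test(ip)) return { v4: ip }; // naive stand-in for isProbablyIpv4
  return {};
}

// parseBogusNxdomainLine('bogus-nxdomain=254.0.0.1') => { v4: '254.0.0.1' }
```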
@@ -278,10 +278,6 @@ export class Cache<S = string> {
       return fn(await fetchAssetsWithout304(primaryUrl, mirrorUrls));
     }
 
-    if (mirrorUrls.length === 0) {
-      return this.applyWithHttp304(primaryUrl, extraCacheKey, async (resp) => fn(await resp.body.text()), opt);
-    }
-
     const baseKey = primaryUrl + '$' + extraCacheKey;
     const getETagKey = (url: string) => baseKey + '$' + url + '$etag';
     const cachedKey = baseKey + '$cached';
@@ -346,10 +342,12 @@ export class Cache<S = string> {
     };
 
     try {
-      const text = await Promise.any([
-        createFetchFallbackPromise(primaryUrl, -1),
-        ...mirrorUrls.map(createFetchFallbackPromise)
-      ]);
+      const text = mirrorUrls.length === 0
+        ? await createFetchFallbackPromise(primaryUrl, -1)
+        : await Promise.any([
+          createFetchFallbackPromise(primaryUrl, -1),
+          ...mirrorUrls.map(createFetchFallbackPromise)
+        ]);
 
       console.log(picocolors.yellow('[cache] miss'), primaryUrl);
       const serializer = 'serializer' in opt ? opt.serializer : identity as any;
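With the early return removed in the previous hunk, the no-mirror case now lives inside the same try: a single direct fetch when mirrorUrls is empty, otherwise Promise.any racing the primary against the mirrors. Promise.any resolves with the first fulfilled promise and rejects (with an AggregateError) only if every URL fails; the -1 passed for the primary looks like an index slot (Array#map supplies real indices for the mirrors), presumably used to stagger start times. A reduced sketch of the race, outside the cache class:

```ts
// Reduced sketch of the primary-plus-mirrors race (no stagger logic).
// fetchText stands in for createFetchFallbackPromise.
async function fetchTextWithMirrors(
  fetchText: (url: string) => Promise<string>,
  primaryUrl: string,
  mirrorUrls: string[]
): Promise<string> {
  if (mirrorUrls.length === 0) {
    return fetchText(primaryUrl); // nothing to race against
  }
  // First successful response wins; AggregateError only if all fail.
  return Promise.any([primaryUrl, ...mirrorUrls].map(fetchText));
}
```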
@@ -1,15 +0,0 @@
-import { deserializeArray, fsFetchCache, getFileContentHash, serializeArray } from './cache-filesystem';
-import { createMemoizedPromise } from './memo-promise';
-
-export const getPublicSuffixListTextPromise = createMemoizedPromise(() => fsFetchCache.applyWithHttp304<string[]>(
-  'https://publicsuffix.org/list/public_suffix_list.dat',
-  getFileContentHash(__filename),
-  (r) => r.body.text().then(text => text.split('\n')),
-  {
-    // https://github.com/publicsuffix/list/blob/master/.github/workflows/tld-update.yml
-    // Though the action runs every 24 hours, the IANA list is updated every 7 days.
-    // So a 3 day TTL should be enough.
-    serializer: serializeArray,
-    deserializer: deserializeArray
-  }
-));
@@ -7,7 +7,6 @@ import undici, {
 
 import type {
   Dispatcher,
-  RequestInit,
   Response
 } from 'undici';
 
@@ -143,33 +142,33 @@ export const defaultRequestInit = {
   }
 };
 
-export async function fetchWithLog(url: string, init?: RequestInit) {
-  try {
-    const res = await undici.fetch(url, init);
-    if (res.status >= 400) {
-      throw new ResponseError(res, url);
-    }
+// export async function fetchWithLog(url: string, init?: RequestInit) {
+//   try {
+//     const res = await undici.fetch(url, init);
+//     if (res.status >= 400) {
+//       throw new ResponseError(res, url);
+//     }
 
-    if (!(res.status >= 200 && res.status <= 299) && res.status !== 304) {
-      throw new ResponseError(res, url);
-    }
+//     if (!(res.status >= 200 && res.status <= 299) && res.status !== 304) {
+//       throw new ResponseError(res, url);
+//     }
 
-    return res;
-  } catch (err: unknown) {
-    if (typeof err === 'object' && err !== null && 'name' in err) {
-      if ((
-        err.name === 'AbortError'
-        || ('digest' in err && err.digest === 'AbortError')
-      )) {
-        console.log(picocolors.gray('[fetch abort]'), url);
-      }
-    } else {
-      console.log(picocolors.gray('[fetch fail]'), url, { name: (err as any).name }, err);
-    }
+//     return res;
+//   } catch (err: unknown) {
+//     if (typeof err === 'object' && err !== null && 'name' in err) {
+//       if ((
+//         err.name === 'AbortError'
+//         || ('digest' in err && err.digest === 'AbortError')
+//       )) {
+//         console.log(picocolors.gray('[fetch abort]'), url);
+//       }
+//     } else {
+//       console.log(picocolors.gray('[fetch fail]'), url, { name: (err as any).name }, err);
+//     }
 
-    throw err;
-  }
-}
+//     throw err;
+//   }
+// }
 
 export async function requestWithLog(url: string, opt?: Parameters<typeof undici.request>[1]) {
   try {
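fetchWithLog is commented out above; its role is presumably taken over by the $fetch helper from ./lib/make-fetch-happen that the earlier hunks switch to. requestWithLog (truncated here) survives and wraps undici.request. For reference, a bare-bones wrapper over undici.request with a status guard; an illustrative sketch, not the project's implementation:

```ts
import undici from 'undici';

// Illustrative only: undici.request resolves even for error statuses,
// so the caller must check statusCode itself (there is no res.ok).
async function requestText(url: string): Promise<string> {
  const { statusCode, body } = await undici.request(url);
  if (statusCode >= 400) {
    throw new Error(`GET ${url} failed with HTTP ${statusCode}`);
  }
  return body.text();
}
```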