Mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-12 01:00:34 +08:00)

Commit e626a6b5d2 (parent c95e96fc61)
Perf: use Bun.peek() to save a few ticks
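
Every hunk below applies the same micro-optimization: a memoized promise is probed with Bun.peek() first, and the await (plus the tracing span wrapped around it) is only paid when the promise has not settled yet. Bun.peek() returns the value of an already-resolved promise synchronously and returns the promise itself while it is still pending. A minimal sketch of the pattern, with a helper name and signature that are illustrative rather than taken from the repo:

    // Sketch only: "resolveFast" is a hypothetical helper, not part of this commit.
    async function resolveFast(promise: Promise<string[]>): Promise<string[]> {
      // Bun.peek() (a Bun-only API) returns the value of an already-resolved
      // promise synchronously, or the promise itself if it is still pending.
      const peeked = Bun.peek(promise);
      return peeked === promise
        ? await promise          // still pending: fall back to a normal await
        : (peeked as string[]);  // already resolved: reuse the value, skip a tick
    }

The commit also adds a tracePromise() method to Span in trace.ts, so the slow path can hand an existing promise straight to a child span, as seen in the buildChnCidr and buildMicrosoftCdn hunks below.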
@@ -5,11 +5,11 @@ import { fetchRemoteTextByLine, readFileIntoProcessedArray } from './lib/fetch-t
 import { task } from './trace';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { isProbablyIpv4, isProbablyIpv6 } from './lib/is-fast-ip';
-import { TTL, deserializeArray, fsCache, serializeArray } from './lib/cache-filesystem';
+import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';
 
 const URL = 'https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf';
 
-const getBogusNxDomainIPsPromise = fsCache.apply(
+const getBogusNxDomainIPsPromise = fsFetchCache.apply(
   URL,
   async () => {
     const result: string[] = [];
@@ -34,7 +34,13 @@ const getBogusNxDomainIPsPromise = fsCache.apply(
 
 export const buildAntiBogusDomain = task(import.meta.path, async (span) => {
   const result: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/ip/reject.conf'));
-  result.push(...(await getBogusNxDomainIPsPromise));
+
+  const peeked = Bun.peek(getBogusNxDomainIPsPromise);
+  const bogusNxDomainIPs = peeked === getBogusNxDomainIPsPromise
+    ? await span.traceChild('get bogus nxdomain ips').traceAsyncFn(() => getBogusNxDomainIPsPromise)
+    : (peeked as string[]);
+
+  result.push(...bogusNxDomainIPs);
 
   const description = [
     ...SHARED_DESCRIPTION,
@@ -5,9 +5,9 @@ import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
 import { task } from './trace';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { createMemoizedPromise } from './lib/memo-promise';
-import { TTL, deserializeArray, fsCache, serializeArray } from './lib/cache-filesystem';
+import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';
 
-export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsCache.apply(
+export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsFetchCache.apply(
   'https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf',
   () => parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf'),
   {
@@ -18,7 +18,11 @@ export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsCache.app
 ));
 
 export const buildAppleCdn = task(import.meta.path, async (span) => {
-  const res = await span.traceChild('get apple cdn domains').traceAsyncFn(getAppleCdnDomainsPromise);
+  const promise = getAppleCdnDomainsPromise();
+  const peeked = Bun.peek(promise);
+  const res: string[] = peeked === promise
+    ? await span.traceChild('get apple cdn domains').traceAsyncFn(() => promise)
+    : (peeked as string[]);
 
   const description = [
     ...SHARED_DESCRIPTION,
@@ -42,12 +42,19 @@ const getS3OSSDomainsPromise = (async (): Promise<Set<string>> => {
 })();
 
 export const buildCdnDownloadConf = task(import.meta.path, async (span) => {
-  /** @type {string[]} */
-  const cdnDomainsList: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf'));
-  (await getS3OSSDomainsPromise).forEach((domain: string) => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); });
+  const [
+    cdnDomainsList,
+    S3OSSDomains,
+    downloadDomainSet,
+    steamDomainSet
+  ] = await Promise.all([
+    readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf')),
+    getS3OSSDomainsPromise,
+    readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/download.conf')),
+    readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/steam.conf'))
+  ]);
 
-  const downloadDomainSet: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/download.conf'));
-  const steamDomainSet: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/steam.conf'));
+  cdnDomainsList.push(...S3OSSDomains);
 
   return Promise.all([
     createRuleset(
@@ -2,7 +2,7 @@ import { fetchRemoteTextByLine } from './lib/fetch-text-by-line';
 import { resolve as pathResolve } from 'path';
 import { compareAndWriteFile, withBannerArray } from './lib/create-file';
 import { processLineFromReadline } from './lib/process-line';
-import { traceAsync, traceSync } from './lib/trace-runner';
+import { traceSync } from './lib/trace-runner';
 import { task } from './trace';
 
 import { exclude } from 'fast-cidr-tools';
@@ -20,11 +20,7 @@ const INCLUDE_CIDRS = [
 ];
 
 export const getChnCidrPromise = createMemoizedPromise(async () => {
-  const cidr = await traceAsync(
-    picocolors.gray('download chnroutes2'),
-    async () => processLineFromReadline(await fetchRemoteTextByLine('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')),
-    picocolors.gray
-  );
+  const cidr = await processLineFromReadline(await fetchRemoteTextByLine('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt'));
   return traceSync(
     picocolors.gray('processing chnroutes2'),
     () => exclude([...cidr, ...INCLUDE_CIDRS], EXCLUDE_CIDRS, true),
@@ -33,7 +29,11 @@ export const getChnCidrPromise = createMemoizedPromise(async () => {
 });
 
 export const buildChnCidr = task(import.meta.path, async (span) => {
-  const filteredCidr = await getChnCidrPromise();
+  const cidrPromise = getChnCidrPromise();
+  const peeked = Bun.peek(cidrPromise);
+  const filteredCidr: string[] = peeked === cidrPromise
+    ? await span.traceChild('download chnroutes2').tracePromise(cidrPromise)
+    : (peeked as string[]);
 
   // Can not use SHARED_DESCRIPTION here as different license
   const description = [
@@ -27,13 +27,19 @@ export const buildDomesticRuleset = task(import.meta.path, async (span) => {
     'This file contains known addresses that are avaliable in the Mainland China.'
   ];
 
+  const promise = getDomesticDomainsRulesetPromise();
+  const peeked = Bun.peek(promise);
+  const res: string[] = peeked === promise
+    ? await promise
+    : (peeked as string[]);
+
   return Promise.all([
     createRuleset(
       span,
       'Sukka\'s Ruleset - Domestic Domains',
       rulesetDescription,
       new Date(),
-      await getDomesticDomainsRulesetPromise(),
+      res,
       'ruleset',
       path.resolve(import.meta.dir, '../List/non_ip/domestic.conf'),
       path.resolve(import.meta.dir, '../Clash/non_ip/domestic.txt')
@@ -24,24 +24,21 @@ const BLACKLIST = [
 ];
 
 export const getMicrosoftCdnRulesetPromise = createMemoizedPromise(async () => {
-  const set = await traceAsync('fetch accelerated-domains.china.conf', async () => {
-    // First trie is to find the microsoft domains that matches probe domains
-    const trie = createTrie();
-    for await (const line of await fetchRemoteTextByLine('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf')) {
-      if (line.startsWith('server=/') && line.endsWith('/114.114.114.114')) {
-        const domain = line.slice(8, -16);
-        trie.add(domain);
-      }
-    }
-    return new Set(PROBE_DOMAINS.flatMap(domain => trie.find(domain)));
-  });
+  // First trie is to find the microsoft domains that matches probe domains
+  const trie = createTrie();
+  for await (const line of await fetchRemoteTextByLine('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf')) {
+    if (line.startsWith('server=/') && line.endsWith('/114.114.114.114')) {
+      const domain = line.slice(8, -16);
+      trie.add(domain);
+    }
+  }
+  const set = new Set(PROBE_DOMAINS.flatMap(domain => trie.find(domain)));
 
   // Second trie is to remove blacklisted domains
   const trie2 = createTrie(set);
-  const black = BLACKLIST.flatMap(domain => trie2.find(domain, true));
-  for (let i = 0, len = black.length; i < len; i++) {
-    set.delete(black[i]);
-  }
+  BLACKLIST.forEach(black => {
+    trie2.substractSetInPlaceFromFound(black, set);
+  });
 
   return Array.from(set).map(d => `DOMAIN-SUFFIX,${d}`).concat(WHITELIST);
 });
@@ -56,12 +53,18 @@ export const buildMicrosoftCdn = task(import.meta.path, async (span) => {
     ' - https://github.com/felixonmars/dnsmasq-china-list'
   ];
 
+  const promise = getMicrosoftCdnRulesetPromise();
+  const peeked = Bun.peek(promise);
+  const res: string[] = peeked === promise
+    ? await span.traceChild('get microsoft cdn domains').tracePromise(promise)
+    : (peeked as string[]);
+
   return createRuleset(
     span,
     'Sukka\'s Ruleset - Microsoft CDN',
     description,
     new Date(),
-    await getMicrosoftCdnRulesetPromise(),
+    res,
     'ruleset',
     path.resolve(import.meta.dir, '../List/non_ip/microsoft_cdn.conf'),
     path.resolve(import.meta.dir, '../Clash/non_ip/microsoft_cdn.txt')
@@ -18,9 +18,14 @@ import { getPhishingDomains } from './lib/get-phishing-domains';
 
 import * as SetHelpers from 'mnemonist/set';
 import { setAddFromArray } from './lib/set-add-from-array';
+import type { PublicSuffixList } from '@gorhill/publicsuffixlist';
 
 export const buildRejectDomainSet = task(import.meta.path, async (span) => {
-  const gorhill = await getGorhillPublicSuffixPromise();
+  const gorhillPromise = getGorhillPublicSuffixPromise();
+  const gorhillPeeked = Bun.peek(gorhillPromise);
+  const gorhill: PublicSuffixList = gorhillPeeked === gorhillPromise
+    ? await gorhillPromise
+    : (gorhillPeeked as PublicSuffixList);
 
   /** Whitelists */
   const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
@@ -105,7 +110,7 @@ export const buildRejectDomainSet = task(import.meta.path, async (span) => {
   const trie = createTrie(domainSets);
 
   domainSuffixSet.forEach(suffix => {
-    trie.remove(suffix);
     domainSets.delete(suffix);
+    trie.substractSetInPlaceFromFound(suffix, domainSets);
   });
   filterRuleWhitelistDomainSets.forEach(suffix => {
@@ -12,14 +12,14 @@ import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
 import picocolors from 'picocolors';
 import { fetchRemoteTextByLine } from './lib/fetch-text-by-line';
 import { processLine } from './lib/process-line';
-import { TTL, deserializeArray, fsCache, serializeArray } from './lib/cache-filesystem';
+import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';
 import { createMemoizedPromise } from './lib/memo-promise';
 
 import * as SetHelpers from 'mnemonist/set';
 
 const s = new Sema(2);
 
-const latestTopUserAgentsPromise = fsCache.apply(
+const latestTopUserAgentsPromise = fsFetchCache.apply(
   'https://unpkg.com/top-user-agents@latest/src/desktop.json',
   () => fetchWithRetry('https://unpkg.com/top-user-agents@latest/src/desktop.json')
     .then(res => res.json<string[]>())
@@ -39,7 +39,7 @@ const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>>
   try {
     const randomUserAgent = topUserAgents[Math.floor(Math.random() * topUserAgents.length)];
 
-    return await fsCache.apply(
+    return await fsFetchCache.apply(
       url,
       () => s.acquire().then(() => fetchWithRetry(url, {
         headers: {
@@ -33,7 +33,11 @@ export const getTelegramCIDRPromise = createMemoizedPromise(async () => {
 });
 
 export const buildTelegramCIDR = task(import.meta.path, async (span) => {
-  const { date, results } = await getTelegramCIDRPromise();
+  const promise = getTelegramCIDRPromise();
+  const peeked = Bun.peek(promise);
+  const { date, results } = peeked === promise
+    ? await span.traceChild('get telegram cidr').tracePromise(promise)
+    : (peeked as { date: Date, results: string[] });
 
   if (results.length === 0) {
     throw new Error('Failed to fetch data!');
@@ -1,11 +1,11 @@
-import { TTL, fsCache } from './lib/cache-filesystem';
+import { TTL, fsFetchCache } from './lib/cache-filesystem';
 import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
 import { createMemoizedPromise } from './lib/memo-promise';
 import { traceAsync } from './lib/trace-runner';
 
 export const getPublicSuffixListTextPromise = createMemoizedPromise(() => traceAsync(
   'obtain public_suffix_list',
-  () => fsCache.apply(
+  () => fsFetchCache.apply(
     'https://publicsuffix.org/list/public_suffix_list.dat',
     () => fetchWithRetry('https://publicsuffix.org/list/public_suffix_list.dat', defaultRequestInit).then(r => r.text()),
     {
@@ -158,9 +158,21 @@ export class Cache {
     let value: T;
     if (cached == null) {
       console.log(picocolors.yellow('[cache] miss'), picocolors.gray(key), picocolors.gray(`ttl: ${TTL.humanReadable(ttl)}`));
-      value = await fn();
 
       const serializer = 'serializer' in opt ? opt.serializer : identity;
+
+      const promise = fn();
+      const peeked = Bun.peek(promise);
+
+      if (peeked === promise) {
+        return promise.then((value) => {
+          const serializer = 'serializer' in opt ? opt.serializer : identity;
+          this.set(key, serializer(value), ttl);
+          return value;
+        });
+      }
+
+      value = peeked as T;
       this.set(key, serializer(value), ttl);
     } else {
       console.log(picocolors.green('[cache] hit'), picocolors.gray(key));
@@ -168,6 +180,7 @@ export class Cache {
       const deserializer = 'deserializer' in opt ? opt.deserializer : identity;
       value = deserializer(cached);
     }
 
     return value;
   }
+
@@ -13,6 +13,7 @@ import type { Span } from '../trace';
 
 const DEBUG_DOMAIN_TO_FIND: string | null = null; // example.com | null
 let foundDebugDomain = false;
+const temporaryBypass = DEBUG_DOMAIN_TO_FIND !== null;
 
 export function processDomainLists(span: Span, domainListsUrl: string, includeAllSubDomain = false, ttl: number | null = null) {
   return span.traceChild(`process domainlist: ${domainListsUrl}`).traceAsyncFn(() => fsFetchCache.apply(
@@ -38,7 +39,7 @@ export function processDomainLists(span: Span, domainListsUrl: string, includeAl
     },
     {
       ttl,
-      temporaryBypass: DEBUG_DOMAIN_TO_FIND !== null,
+      temporaryBypass,
       serializer: serializeSet,
       deserializer: deserializeSet
     }
@@ -97,7 +98,7 @@ export function processHosts(span: Span, hostsUrl: string, mirrors: string[] | n
     },
     {
       ttl,
-      temporaryBypass: DEBUG_DOMAIN_TO_FIND !== null,
+      temporaryBypass,
       serializer: serializeSet,
       deserializer: deserializeSet
     }
@@ -131,7 +132,11 @@ export async function processFilterRules(
 
   const warningMessages: string[] = [];
 
-  const gorhill = await getGorhillPublicSuffixPromise();
+  const gorhillPromise = getGorhillPublicSuffixPromise();
+  const peekedGorhill = Bun.peek(gorhillPromise);
+  const gorhill = peekedGorhill === gorhillPromise
+    ? await span.traceChild('get gorhill').tracePromise(gorhillPromise)
+    : (peekedGorhill as PublicSuffixList);
 
   /**
    * @param {string} line
@@ -215,7 +220,7 @@ export async function processFilterRules(
     },
     {
       ttl,
-      temporaryBypass: DEBUG_DOMAIN_TO_FIND !== null,
+      temporaryBypass,
       serializer: JSON.stringify,
       deserializer: JSON.parse
     }
@@ -28,6 +28,7 @@ export interface Span {
   readonly traceChild: (name: string) => Span,
   readonly traceSyncFn: <T>(fn: (span: Span) => T) => T,
   readonly traceAsyncFn: <T>(fn: (span: Span) => T | Promise<T>) => Promise<T>,
+  readonly tracePromise: <T>(promise: Promise<T>) => Promise<T>,
   readonly traceResult: TraceResult
 }
 
@@ -83,6 +84,13 @@ export const createSpan = (name: string, parentTraceResult?: TraceResult): Span
     },
     get traceResult() {
       return curTraceResult;
     },
+    async tracePromise<T>(promise: Promise<T>): Promise<T> {
+      try {
+        return await promise;
+      } finally {
+        span.stop();
+      }
+    }
   };
 