Mirror of https://github.com/SukkaW/Surge.git, synced 2025-12-12 01:00:34 +08:00
Perf: use Bun.peek() to save a few ticks
commit e626a6b5d2 (parent c95e96fc61)
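The same pattern repeats in every file touched below: instead of unconditionally awaiting a memoized promise that has usually already settled, the call site peeks at it first and only awaits (and opens a trace span) when the work is still pending. A minimal TypeScript sketch of that pattern, with getSomethingPromise() as an illustrative stand-in for the repo's memoized promises (e.g. getChnCidrPromise()):

// Sketch only: Bun.peek() returns the value synchronously when the promise
// is already fulfilled, and the promise itself when it is still pending.
declare function getSomethingPromise(): Promise<string[]>;

export async function buildSomething(): Promise<string[]> {
  const promise = getSomethingPromise();
  const peeked = Bun.peek(promise);

  return peeked === promise
    ? await promise          // still pending: fall back to awaiting it
    : (peeked as string[]);  // already fulfilled: skip the extra await tick
}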
@@ -5,11 +5,11 @@ import { fetchRemoteTextByLine, readFileIntoProcessedArray } from './lib/fetch-t
 import { task } from './trace';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { isProbablyIpv4, isProbablyIpv6 } from './lib/is-fast-ip';
-import { TTL, deserializeArray, fsCache, serializeArray } from './lib/cache-filesystem';
+import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';

 const URL = 'https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/bogus-nxdomain.china.conf';

-const getBogusNxDomainIPsPromise = fsCache.apply(
+const getBogusNxDomainIPsPromise = fsFetchCache.apply(
   URL,
   async () => {
     const result: string[] = [];
@@ -34,7 +34,13 @@ const getBogusNxDomainIPsPromise = fsCache.apply(

 export const buildAntiBogusDomain = task(import.meta.path, async (span) => {
   const result: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/ip/reject.conf'));
-  result.push(...(await getBogusNxDomainIPsPromise));
+  const peeked = Bun.peek(getBogusNxDomainIPsPromise);
+  const bogusNxDomainIPs = peeked === getBogusNxDomainIPsPromise
+    ? await span.traceChild('get bogus nxdomain ips').traceAsyncFn(() => getBogusNxDomainIPsPromise)
+    : (peeked as string[]);
+
+  result.push(...bogusNxDomainIPs);

   const description = [
     ...SHARED_DESCRIPTION,

@@ -5,9 +5,9 @@ import { parseFelixDnsmasq } from './lib/parse-dnsmasq';
 import { task } from './trace';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { createMemoizedPromise } from './lib/memo-promise';
-import { TTL, deserializeArray, fsCache, serializeArray } from './lib/cache-filesystem';
+import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';

-export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsCache.apply(
+export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsFetchCache.apply(
   'https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf',
   () => parseFelixDnsmasq('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/apple.china.conf'),
   {
@@ -18,7 +18,11 @@ export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsCache.app
 ));

 export const buildAppleCdn = task(import.meta.path, async (span) => {
-  const res = await span.traceChild('get apple cdn domains').traceAsyncFn(getAppleCdnDomainsPromise);
+  const promise = getAppleCdnDomainsPromise();
+  const peeked = Bun.peek(promise);
+  const res: string[] = peeked === promise
+    ? await span.traceChild('get apple cdn domains').traceAsyncFn(() => promise)
+    : (peeked as string[]);

   const description = [
     ...SHARED_DESCRIPTION,

@@ -42,12 +42,19 @@ const getS3OSSDomainsPromise = (async (): Promise<Set<string>> => {
 })();

 export const buildCdnDownloadConf = task(import.meta.path, async (span) => {
-  /** @type {string[]} */
-  const cdnDomainsList: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf'));
-  (await getS3OSSDomainsPromise).forEach((domain: string) => { cdnDomainsList.push(`DOMAIN-SUFFIX,${domain}`); });
+  const [
+    cdnDomainsList,
+    S3OSSDomains,
+    downloadDomainSet,
+    steamDomainSet
+  ] = await Promise.all([
+    readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/non_ip/cdn.conf')),
+    getS3OSSDomainsPromise,
+    readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/download.conf')),
+    readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/steam.conf'))
+  ]);

-  const downloadDomainSet: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/download.conf'));
-  const steamDomainSet: string[] = await readFileIntoProcessedArray(path.resolve(import.meta.dir, '../Source/domainset/steam.conf'));
+  cdnDomainsList.push(...S3OSSDomains);

   return Promise.all([
     createRuleset(

@@ -2,7 +2,7 @@ import { fetchRemoteTextByLine } from './lib/fetch-text-by-line';
 import { resolve as pathResolve } from 'path';
 import { compareAndWriteFile, withBannerArray } from './lib/create-file';
 import { processLineFromReadline } from './lib/process-line';
-import { traceAsync, traceSync } from './lib/trace-runner';
+import { traceSync } from './lib/trace-runner';
 import { task } from './trace';

 import { exclude } from 'fast-cidr-tools';
@@ -20,11 +20,7 @@ const INCLUDE_CIDRS = [
 ];

 export const getChnCidrPromise = createMemoizedPromise(async () => {
-  const cidr = await traceAsync(
-    picocolors.gray('download chnroutes2'),
-    async () => processLineFromReadline(await fetchRemoteTextByLine('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt')),
-    picocolors.gray
-  );
+  const cidr = await processLineFromReadline(await fetchRemoteTextByLine('https://raw.githubusercontent.com/misakaio/chnroutes2/master/chnroutes.txt'));
   return traceSync(
     picocolors.gray('processing chnroutes2'),
     () => exclude([...cidr, ...INCLUDE_CIDRS], EXCLUDE_CIDRS, true),
@@ -33,7 +29,11 @@ export const getChnCidrPromise = createMemoizedPromise(async () => {
 });

 export const buildChnCidr = task(import.meta.path, async (span) => {
-  const filteredCidr = await getChnCidrPromise();
+  const cidrPromise = getChnCidrPromise();
+  const peeked = Bun.peek(cidrPromise);
+  const filteredCidr: string[] = peeked === cidrPromise
+    ? await span.traceChild('download chnroutes2').tracePromise(cidrPromise)
+    : (peeked as string[]);

   // Can not use SHARED_DESCRIPTION here as different license
   const description = [

@@ -27,13 +27,19 @@ export const buildDomesticRuleset = task(import.meta.path, async (span) => {
     'This file contains known addresses that are avaliable in the Mainland China.'
   ];

+  const promise = getDomesticDomainsRulesetPromise();
+  const peeked = Bun.peek(promise);
+  const res: string[] = peeked === promise
+    ? await promise
+    : (peeked as string[]);
+
   return Promise.all([
     createRuleset(
       span,
       'Sukka\'s Ruleset - Domestic Domains',
       rulesetDescription,
       new Date(),
-      await getDomesticDomainsRulesetPromise(),
+      res,
       'ruleset',
       path.resolve(import.meta.dir, '../List/non_ip/domestic.conf'),
       path.resolve(import.meta.dir, '../Clash/non_ip/domestic.txt')

@@ -24,24 +24,21 @@ const BLACKLIST = [
 ];

 export const getMicrosoftCdnRulesetPromise = createMemoizedPromise(async () => {
-  const set = await traceAsync('fetch accelerated-domains.china.conf', async () => {
-    // First trie is to find the microsoft domains that matches probe domains
-    const trie = createTrie();
-    for await (const line of await fetchRemoteTextByLine('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf')) {
-      if (line.startsWith('server=/') && line.endsWith('/114.114.114.114')) {
-        const domain = line.slice(8, -16);
-        trie.add(domain);
-      }
+  // First trie is to find the microsoft domains that matches probe domains
+  const trie = createTrie();
+  for await (const line of await fetchRemoteTextByLine('https://raw.githubusercontent.com/felixonmars/dnsmasq-china-list/master/accelerated-domains.china.conf')) {
+    if (line.startsWith('server=/') && line.endsWith('/114.114.114.114')) {
+      const domain = line.slice(8, -16);
+      trie.add(domain);
     }
-    return new Set(PROBE_DOMAINS.flatMap(domain => trie.find(domain)));
-  });
+  }
+  const set = new Set(PROBE_DOMAINS.flatMap(domain => trie.find(domain)));

   // Second trie is to remove blacklisted domains
   const trie2 = createTrie(set);
-  const black = BLACKLIST.flatMap(domain => trie2.find(domain, true));
-  for (let i = 0, len = black.length; i < len; i++) {
-    set.delete(black[i]);
-  }
+  BLACKLIST.forEach(black => {
+    trie2.substractSetInPlaceFromFound(black, set);
+  });

   return Array.from(set).map(d => `DOMAIN-SUFFIX,${d}`).concat(WHITELIST);
 });
@@ -56,12 +53,18 @@ export const buildMicrosoftCdn = task(import.meta.path, async (span) => {
     ' - https://github.com/felixonmars/dnsmasq-china-list'
   ];

+  const promise = getMicrosoftCdnRulesetPromise();
+  const peeked = Bun.peek(promise);
+  const res: string[] = peeked === promise
+    ? await span.traceChild('get microsoft cdn domains').tracePromise(promise)
+    : (peeked as string[]);
+
   return createRuleset(
     span,
     'Sukka\'s Ruleset - Microsoft CDN',
     description,
     new Date(),
-    await getMicrosoftCdnRulesetPromise(),
+    res,
     'ruleset',
     path.resolve(import.meta.dir, '../List/non_ip/microsoft_cdn.conf'),
     path.resolve(import.meta.dir, '../Clash/non_ip/microsoft_cdn.txt')

@@ -18,9 +18,14 @@ import { getPhishingDomains } from './lib/get-phishing-domains';

 import * as SetHelpers from 'mnemonist/set';
 import { setAddFromArray } from './lib/set-add-from-array';
+import type { PublicSuffixList } from '@gorhill/publicsuffixlist';

 export const buildRejectDomainSet = task(import.meta.path, async (span) => {
-  const gorhill = await getGorhillPublicSuffixPromise();
+  const gorhillPromise = getGorhillPublicSuffixPromise();
+  const gorhillPeeked = Bun.peek(gorhillPromise);
+  const gorhill: PublicSuffixList = gorhillPeeked === gorhillPromise
+    ? await gorhillPromise
+    : (gorhillPeeked as PublicSuffixList);

   /** Whitelists */
   const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
@@ -105,7 +110,7 @@ export const buildRejectDomainSet = task(import.meta.path, async (span) => {
   const trie = createTrie(domainSets);

   domainSuffixSet.forEach(suffix => {
-    trie.remove(suffix);
+    domainSets.delete(suffix);
     trie.substractSetInPlaceFromFound(suffix, domainSets);
   });
   filterRuleWhitelistDomainSets.forEach(suffix => {

@@ -12,14 +12,14 @@ import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
 import picocolors from 'picocolors';
 import { fetchRemoteTextByLine } from './lib/fetch-text-by-line';
 import { processLine } from './lib/process-line';
-import { TTL, deserializeArray, fsCache, serializeArray } from './lib/cache-filesystem';
+import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';
 import { createMemoizedPromise } from './lib/memo-promise';

 import * as SetHelpers from 'mnemonist/set';

 const s = new Sema(2);

-const latestTopUserAgentsPromise = fsCache.apply(
+const latestTopUserAgentsPromise = fsFetchCache.apply(
   'https://unpkg.com/top-user-agents@latest/src/desktop.json',
   () => fetchWithRetry('https://unpkg.com/top-user-agents@latest/src/desktop.json')
     .then(res => res.json<string[]>())
@@ -39,7 +39,7 @@ const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>>
   try {
     const randomUserAgent = topUserAgents[Math.floor(Math.random() * topUserAgents.length)];

-    return await fsCache.apply(
+    return await fsFetchCache.apply(
       url,
       () => s.acquire().then(() => fetchWithRetry(url, {
         headers: {

@@ -33,7 +33,11 @@ export const getTelegramCIDRPromise = createMemoizedPromise(async () => {
 });

 export const buildTelegramCIDR = task(import.meta.path, async (span) => {
-  const { date, results } = await getTelegramCIDRPromise();
+  const promise = getTelegramCIDRPromise();
+  const peeked = Bun.peek(promise);
+  const { date, results } = peeked === promise
+    ? await span.traceChild('get telegram cidr').tracePromise(promise)
+    : (peeked as { date: Date, results: string[] });

   if (results.length === 0) {
     throw new Error('Failed to fetch data!');

@@ -1,11 +1,11 @@
-import { TTL, fsCache } from './lib/cache-filesystem';
+import { TTL, fsFetchCache } from './lib/cache-filesystem';
 import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
 import { createMemoizedPromise } from './lib/memo-promise';
 import { traceAsync } from './lib/trace-runner';

 export const getPublicSuffixListTextPromise = createMemoizedPromise(() => traceAsync(
   'obtain public_suffix_list',
-  () => fsCache.apply(
+  () => fsFetchCache.apply(
     'https://publicsuffix.org/list/public_suffix_list.dat',
     () => fetchWithRetry('https://publicsuffix.org/list/public_suffix_list.dat', defaultRequestInit).then(r => r.text()),
     {

@@ -158,9 +158,21 @@ export class Cache {
     let value: T;
     if (cached == null) {
       console.log(picocolors.yellow('[cache] miss'), picocolors.gray(key), picocolors.gray(`ttl: ${TTL.humanReadable(ttl)}`));
-      value = await fn();

       const serializer = 'serializer' in opt ? opt.serializer : identity;
+
+      const promise = fn();
+      const peeked = Bun.peek(promise);
+
+      if (peeked === promise) {
+        return promise.then((value) => {
+          const serializer = 'serializer' in opt ? opt.serializer : identity;
+          this.set(key, serializer(value), ttl);
+          return value;
+        });
+      }
+
+      value = peeked as T;
       this.set(key, serializer(value), ttl);
     } else {
       console.log(picocolors.green('[cache] hit'), picocolors.gray(key));
@@ -168,6 +180,7 @@ export class Cache {
       const deserializer = 'deserializer' in opt ? opt.deserializer : identity;
       value = deserializer(cached);
     }
+
     return value;
   }


@@ -13,6 +13,7 @@ import type { Span } from '../trace';

 const DEBUG_DOMAIN_TO_FIND: string | null = null; // example.com | null
 let foundDebugDomain = false;
+const temporaryBypass = DEBUG_DOMAIN_TO_FIND !== null;

 export function processDomainLists(span: Span, domainListsUrl: string, includeAllSubDomain = false, ttl: number | null = null) {
   return span.traceChild(`process domainlist: ${domainListsUrl}`).traceAsyncFn(() => fsFetchCache.apply(
@@ -38,7 +39,7 @@ export function processDomainLists(span: Span, domainListsUrl: string, includeAl
     },
     {
       ttl,
-      temporaryBypass: DEBUG_DOMAIN_TO_FIND !== null,
+      temporaryBypass,
       serializer: serializeSet,
       deserializer: deserializeSet
     }
@@ -97,7 +98,7 @@ export function processHosts(span: Span, hostsUrl: string, mirrors: string[] | n
     },
     {
       ttl,
-      temporaryBypass: DEBUG_DOMAIN_TO_FIND !== null,
+      temporaryBypass,
       serializer: serializeSet,
       deserializer: deserializeSet
     }
@@ -131,7 +132,11 @@ export async function processFilterRules(

   const warningMessages: string[] = [];

-  const gorhill = await getGorhillPublicSuffixPromise();
+  const gorhillPromise = getGorhillPublicSuffixPromise();
+  const peekedGorhill = Bun.peek(gorhillPromise);
+  const gorhill = peekedGorhill === gorhillPromise
+    ? await span.traceChild('get gorhill').tracePromise(gorhillPromise)
+    : (peekedGorhill as PublicSuffixList);

   /**
    * @param {string} line
@@ -215,7 +220,7 @@ export async function processFilterRules(
     },
     {
       ttl,
-      temporaryBypass: DEBUG_DOMAIN_TO_FIND !== null,
+      temporaryBypass,
       serializer: JSON.stringify,
       deserializer: JSON.parse
     }

@@ -28,6 +28,7 @@ export interface Span {
   readonly traceChild: (name: string) => Span,
   readonly traceSyncFn: <T>(fn: (span: Span) => T) => T,
   readonly traceAsyncFn: <T>(fn: (span: Span) => T | Promise<T>) => Promise<T>,
+  readonly tracePromise: <T>(promise: Promise<T>) => Promise<T>,
   readonly traceResult: TraceResult
 }

@@ -83,6 +84,13 @@ export const createSpan = (name: string, parentTraceResult?: TraceResult): Span
     },
     get traceResult() {
       return curTraceResult;
+    },
+    async tracePromise<T>(promise: Promise<T>): Promise<T> {
+      try {
+        return await promise;
+      } finally {
+        span.stop();
+      }
     }
   };

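The tracePromise() hook added above awaits an existing promise and stops the span in a finally block, so the recorded time covers only whatever work was still outstanding. A hedged sketch of how a peeked call site uses it, mirroring the buildChnCidr change earlier in this commit (span and getChnCidrPromise() are assumed to exist as in the diff):

// Sketch of a call site combining Bun.peek() with the new span.tracePromise().
import type { Span } from './trace'; // path as used by the Build scripts in this diff

declare const span: Span;
declare function getChnCidrPromise(): Promise<string[]>;

async function buildChnCidrSketch(): Promise<string[]> {
  const cidrPromise = getChnCidrPromise();
  const peeked = Bun.peek(cidrPromise);
  return peeked === cidrPromise
    ? await span.traceChild('download chnroutes2').tracePromise(cidrPromise) // still pending: trace only the remaining wait
    : (peeked as string[]);                                                  // already settled: no span, no extra await
}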