Mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-12 01:00:34 +08:00)

Refactor: drop all Bun compatibility

This commit is contained in:
parent f30f9774c3
commit bb65a4180c
@@ -17,7 +17,7 @@ export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsFetchCach
   }
 ));
 
-export const buildAppleCdn = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildAppleCdn = task(require.main === module, __filename)(async (span) => {
   const res: string[] = await span.traceChildPromise('get apple cdn domains', getAppleCdnDomainsPromise());
 
   const description = [
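Every build task below changes in the same way: the first argument to task(), which decides whether the task should run immediately when the file is executed directly, loses its Bun branch. A minimal sketch of the two entry-point checks (the variable names are made up for illustration; Bun.main and require.main come straight from the hunks):

// Before: support both runtimes. Under Bun, `Bun.main` holds the absolute path
// of the entry script, so the check had to branch on which runtime is active.
const isMainBunOrNode = typeof Bun !== 'undefined'
  ? Bun.main === __filename      // running under Bun
  : require.main === module;     // running under Node (CommonJS)

// After: Node-only. `require.main === module` is true exactly when this module
// is the CLI entry point, and false when it is merely imported by another task.
const isMainNodeOnly = require.main === module;

// Hypothetical call mirroring the hunks above:
// task(isMainNodeOnly, __filename)(async (span) => { /* ... */ });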
@@ -48,7 +48,7 @@ const getS3OSSDomainsPromise = (async (): Promise<string[]> => {
   return Array.from(S3OSSDomains);
 })();
 
-export const buildCdnDownloadConf = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildCdnDownloadConf = task(require.main === module, __filename)(async (span) => {
   const [
     S3OSSDomains,
 
@@ -16,7 +16,7 @@ export const getChnCidrPromise = createMemoizedPromise(async () => {
   return exclude(cidr, NON_CN_CIDR_INCLUDED_IN_CHNROUTE, true);
 });
 
-export const buildChnCidr = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildChnCidr = task(require.main === module, __filename)(async (span) => {
   const filteredCidr = await span.traceChildAsync('download chnroutes2', getChnCidrPromise);
 
   // Can not use SHARED_DESCRIPTION here as different license
 
@@ -7,7 +7,7 @@ import { task } from './trace';
 const outputSurgeDir = path.resolve(__dirname, '../List');
 const outputClashDir = path.resolve(__dirname, '../Clash');
 
-export const buildCloudMounterRules = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildCloudMounterRules = task(require.main === module, __filename)(async (span) => {
   // AND,((SRC-IP,192.168.1.110), (DOMAIN, example.com))
 
   const results = DOMAINS.flatMap(domain => {
 
@@ -21,7 +21,7 @@ const outputClashDir = path.resolve(__dirname, '../Clash');
 
 const domainsetSrcFolder = 'domainset' + path.sep;
 
-export const buildCommon = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildCommon = task(require.main === module, __filename)(async (span) => {
   const promises: Array<Promise<unknown>> = [];
 
   const paths = await new Fdir()
 
@@ -11,7 +11,7 @@ const DEPRECATED_FILES = [
 const outputSurgeDir = path.resolve(__dirname, '../List');
 const outputClashDir = path.resolve(__dirname, '../Clash');
 
-export const buildDeprecateFiles = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)((span) => span.traceChildAsync('create deprecated files', async (childSpan) => {
+export const buildDeprecateFiles = task(require.main === module, __filename)((span) => span.traceChildAsync('create deprecated files', async (childSpan) => {
   const promises: Array<Promise<unknown>> = [];
 
   for (const [filePath, description] of DEPRECATED_FILES) {
 
@@ -29,7 +29,7 @@ export const getDomesticAndDirectDomainsRulesetPromise = createMemoizedPromise(a
   return [domestics, directs, lans] as const;
 });
 
-export const buildDomesticRuleset = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildDomesticRuleset = task(require.main === module, __filename)(async (span) => {
   const res = await getDomesticAndDirectDomainsRulesetPromise();
 
   const dataset = Object.entries(DOMESTICS);
 
@@ -7,7 +7,7 @@ import { NON_CN_CIDR_INCLUDED_IN_CHNROUTE, RESERVED_IPV4_CIDR } from './constant
 
 import { writeFile } from './lib/bun';
 
-export const buildInternalReverseChnCIDR = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async () => {
+export const buildInternalReverseChnCIDR = task(require.main === module, __filename)(async () => {
   const cidr = await getChnCidrPromise();
 
   const reversedCidr = merge(
 
@@ -44,7 +44,7 @@ export const getMicrosoftCdnRulesetPromise = createMemoizedPromise(async () => {
     .concat(WHITELIST);
 });
 
-export const buildMicrosoftCdn = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildMicrosoftCdn = task(require.main === module, __filename)(async (span) => {
   const description = [
     ...SHARED_DESCRIPTION,
     '',
@@ -5,7 +5,6 @@ import { task } from './trace';
 import { treeDir } from './lib/tree-dir';
 import type { TreeType, TreeTypeArray } from './lib/tree-dir';
 import { fdir as Fdir } from 'fdir';
-import { sort } from './lib/timsort';
 
 import Trie from 'mnemonist/trie';
 import { writeFile } from './lib/bun';
@@ -23,7 +22,7 @@ const folderAndFilesToBeDeployed = [
   'LICENSE'
 ];
 
-export const buildPublic = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildPublic = task(require.main === module, __filename)(async (span) => {
   fs.mkdirSync(publicPath, { recursive: true });
 
   await span
@@ -89,7 +88,7 @@ const html = (string: TemplateStringsArray, ...values: any[]) => string.reduce((
 
 const walk = (tree: TreeTypeArray) => {
   let result = '';
-  sort(tree, prioritySorter);
+  tree.sort(prioritySorter);
   for (let i = 0, len = tree.length; i < len; i++) {
     const entry = tree[i];
     if (entry.type === 'directory') {
@@ -18,11 +18,10 @@ import { SHARED_DESCRIPTION } from './lib/constants';
 import { getPhishingDomains } from './lib/get-phishing-domains';
 
 import { setAddFromArray, setAddFromArrayCurried } from './lib/set-add-from-array';
-import { sort } from './lib/timsort';
 
 const getRejectSukkaConfPromise = readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'));
 
-export const buildRejectDomainSet = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildRejectDomainSet = task(require.main === module, __filename)(async (span) => {
   /** Whitelists */
   const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
 
@@ -171,7 +170,7 @@ export const buildRejectDomainSet = task(typeof Bun !== 'undefined' ? Bun.main =
       return acc;
     }, new Map());
 
-    return sort(Array.from(statMap.entries()).filter(a => a[1] > 9), (a, b) => (b[1] - a[1]) || a[0].localeCompare(b[0]));
+    return Array.from(statMap.entries()).filter(a => a[1] > 9).sort((a, b) => (b[1] - a[1]) || a[0].localeCompare(b[0]));
   });
 
   return Promise.all([
 
@@ -67,7 +67,7 @@ const getBotNetFilterIPsPromise = fsFetchCache.apply(
 
 const localRejectIPSourcesPromise = readFileIntoProcessedArray(path.resolve(__dirname, '../Source/ip/reject.conf'));
 
-export const buildRejectIPList = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildRejectIPList = task(require.main === module, __filename)(async (span) => {
   const result = await localRejectIPSourcesPromise;
 
   const bogusNxDomainIPs = await span.traceChildPromise('get bogus nxdomain ips', getBogusNxDomainIPsPromise);
@@ -43,7 +43,7 @@ const HOSTNAMES = [
   '*.battlenet.com'
 ];
 
-export const buildAlwaysRealIPModule = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildAlwaysRealIPModule = task(require.main === module, __filename)(async (span) => {
   // Intranet, Router Setup, and mant more
   const dataset = [Object.entries(DIRECTS), Object.entries(LANS)];
   const surge = dataset.flatMap(data => data.flatMap(([, { domains }]) => domains.flatMap((domain) => [`*.${domain}`, domain])));
 
@@ -120,7 +120,7 @@ const REDIRECT_FAKEWEBSITES = [
   ['zbrushcn.com', 'https://www.maxon.net/en/zbrush']
 ] as const;
 
-export const buildRedirectModule = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildRedirectModule = task(require.main === module, __filename)(async (span) => {
   const domains = Array.from(new Set([
     ...REDIRECT_MIRROR.map(([from]) => getHostname(from, { detectIp: false })),
     ...REDIRECT_FAKEWEBSITES.flatMap(([from]) => [from, `www.${from}`])
@@ -13,7 +13,7 @@ import { readFileIntoProcessedArray } from './lib/fetch-text-by-line';
 import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';
 
 import { createTrie } from './lib/trie';
-import { peek } from './lib/bun';
+import { peek, track } from './lib/bun';
 
 const s = new Sema(2);
 
@@ -82,7 +82,7 @@ const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>>
   }
 };
 
-export const buildSpeedtestDomainSet = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildSpeedtestDomainSet = task(require.main === module, __filename)(async (span) => {
   const domainTrie = createTrie(
     [
       // speedtest.net
@@ -226,13 +226,13 @@ export const buildSpeedtestDomainSet = task(typeof Bun !== 'undefined' ? Bun.mai
     'Brazil',
     'Turkey'
   ]).reduce<Record<string, Promise<void>>>((pMap, keyword) => {
-    pMap[keyword] = span.traceChildAsync(`fetch speedtest endpoints: ${keyword}`, () => querySpeedtestApi(keyword)).then(hostnameGroup => {
+    pMap[keyword] = track(span.traceChildAsync(`fetch speedtest endpoints: ${keyword}`, () => querySpeedtestApi(keyword)).then(hostnameGroup => {
       return hostnameGroup.forEach(hostname => {
         if (hostname) {
           domainTrie.add(hostname);
         }
       });
-    });
+    }));
 
     return pMap;
   }, {});
@@ -240,7 +240,7 @@ export const buildSpeedtestDomainSet = task(typeof Bun !== 'undefined' ? Bun.mai
   const timer = setTimeout(() => {
     console.error(picocolors.red('Task timeout!'));
     Object.entries(pMap).forEach(([name, p]) => {
-      console.log(`[${name}]`, peek.status(p));
+      console.log(`[${name}]`, peek(p));
     });
 
     resolve();
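This file is the only consumer of the peek helper that the old code aliased to Bun.peek: each keyword's fetch promise is now wrapped in track(), and the timeout handler reports the last observed state via peek(p) instead of peek.status(p). A minimal sketch of that watchdog pattern, using track/peek as they are redefined in lib/bun later in this commit (fetchEndpoints is a hypothetical stand-in for querySpeedtestApi):

import { peek, track } from './lib/bun';

// Hypothetical fetcher, for illustration only.
const fetchEndpoints = (keyword: string): Promise<string[]> =>
  new Promise((resolve) => setTimeout(() => resolve([`${keyword}.example.test`]), 1000));

const pMap: Record<string, Promise<string[]>> = {};
for (const keyword of ['Hong Kong', 'Brazil', 'Turkey']) {
  const p = fetchEndpoints(keyword);
  track(p);          // record 'pending' -> 'fulfilled' / 'rejected' transitions
  pMap[keyword] = p; // peek() is keyed by object identity, so keep the tracked promise itself
}

// Watchdog: if the batch stalls, dump the last known state of every promise.
const timer = setTimeout(() => {
  Object.entries(pMap).forEach(([name, p]) => console.log(`[${name}]`, peek(p)));
}, 5000);

Promise.all(Object.values(pMap)).finally(() => clearTimeout(timer));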
@@ -28,7 +28,7 @@ const removeNoResolved = (line: string) => line.replace(',no-resolve', '');
 /**
  * This only generates a simplified version, for under-used users only.
  */
-export const buildSSPanelUIMAppProfile = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildSSPanelUIMAppProfile = task(require.main === module, __filename)(async (span) => {
   const [
     [domesticDomains, directDomains, lanDomains],
     appleCdnDomains,
 
@@ -50,7 +50,7 @@ export const createRulesetForStreamService = (span: Span, fileId: string, title:
   ]));
 };
 
-export const buildStreamService = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildStreamService = task(require.main === module, __filename)(async (span) => {
   return Promise.all([
     createRulesetForStreamService(span, 'stream', 'All', ALL),
     createRulesetForStreamService(span, 'stream_us', 'North America', NORTH_AMERICA),
 
@@ -32,7 +32,7 @@ export const getTelegramCIDRPromise = createMemoizedPromise(async () => {
   return { date, results };
 });
 
-export const buildTelegramCIDR = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const buildTelegramCIDR = task(require.main === module, __filename)(async (span) => {
   const { date, results } = await span.traceChildAsync('get telegram cidr', getTelegramCIDRPromise);
 
   if (results.length === 0) {
 
@@ -15,7 +15,7 @@ const ASSETS_LIST = {
 
 const mockDir = path.resolve(__dirname, '../Mock');
 
-export const downloadMockAssets = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)((span) => Promise.all(Object.entries(ASSETS_LIST).map(
+export const downloadMockAssets = task(require.main === module, __filename)((span) => Promise.all(Object.entries(ASSETS_LIST).map(
   ([filename, url]) => span
     .traceChild(url)
     .traceAsyncFn(() => fetchWithRetry(url).then(res => {
 
@@ -13,7 +13,7 @@ import { Readable } from 'stream';
 const IS_READING_BUILD_OUTPUT = 1 << 2;
 const ALL_FILES_EXISTS = 1 << 3;
 
-export const downloadPreviousBuild = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => {
+export const downloadPreviousBuild = task(require.main === module, __filename)(async (span) => {
   const buildOutputList: string[] = [];
 
   let flag = 1 | ALL_FILES_EXISTS;
 
@@ -1,4 +1,4 @@
-console.log('Version:', process.version, typeof Bun !== 'undefined' ? Bun.revision : '');
+console.log('Version:', process.version);
 
 import { downloadPreviousBuild } from './download-previous-build';
 import { buildCommon } from './build-common';
@@ -2,19 +2,29 @@ import { dirname } from 'path';
 import fs from 'fs';
 import fsp from 'fs/promises';
 
-interface Peek {
-  <T = undefined>(promise: T | Promise<T>): Promise<T> | T,
-  status<T = undefined>(
-    promise: T | Promise<T>,
-  ): 'pending' | 'fulfilled' | 'rejected' | 'unknown'
-}
-
-const noopPeek = <T = undefined>(_: Promise<T>) => _;
-noopPeek.status = () => 'unknown';
-
-export const peek: Peek = typeof Bun !== 'undefined'
-  ? Bun.peek
-  : noopPeek as Peek;
+const peekStatus = new WeakMap<Promise<any>, 'pending' | 'rejected' | 'fulfilled'>();
+
+export function track<T>(promise: Promise<T>): Promise<T> {
+  // only set to pending if not already tracked
+  if (!peekStatus.has(promise)) {
+    peekStatus.set(promise, 'pending');
+  }
+
+  // Observe the promise, saving the fulfillment in a closure scope.
+  return promise.then(
+    (v) => {
+      peekStatus.set(promise, 'fulfilled');
+      return v;
+    },
+    (e) => {
+      peekStatus.set(promise, 'rejected');
+      throw e;
+    }
+  );
+}
+
+export function peek(promise: Promise<any>): 'pending' | 'rejected' | 'fulfilled' | 'unknown' {
+  return peekStatus.get(promise) ?? 'unknown';
+}
 
 interface Write {
   (
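With Bun.peek gone, promise status can no longer be read natively, so the commit substitutes a tiny observer: track() notes a promise in a WeakMap and updates the entry when it settles, while peek() just reads the map. Because the map is keyed by object identity, peek() only reports a meaningful state for the exact promise object that was handed to track(); anything else comes back as 'unknown'. A minimal sketch (doWork is a hypothetical stand-in):

import { peek, track } from './lib/bun';

// Hypothetical async job, for illustration only.
const doWork = (): Promise<string> =>
  new Promise((resolve) => setTimeout(() => resolve('done'), 100));

const p = doWork();
track(p);                            // status is now recorded for `p`

console.log(peek(p));                // 'pending' (read synchronously, no await)
p.then(() => console.log(peek(p)));  // 'fulfilled' once it settles

console.log(peek(doWork()));         // 'unknown' -- this promise was never tracked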
@@ -23,13 +33,11 @@ interface Write {
   ): Promise<unknown>
 }
 
-export const writeFile: Write = typeof Bun !== 'undefined'
-  ? Bun.write
-  : (async (destination: string, input) => {
-    const dir = dirname(destination);
-
-    if (!fs.existsSync(dir)) {
-      await fsp.mkdir(dir, { recursive: true });
-    }
-    return fsp.writeFile(destination, input, { encoding: 'utf-8' });
-  });
+export const writeFile: Write = async (destination: string, input) => {
+  const dir = dirname(destination);
+
+  if (!fs.existsSync(dir)) {
+    await fsp.mkdir(dir, { recursive: true });
+  }
+  return fsp.writeFile(destination, input, { encoding: 'utf-8' });
+};
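writeFile used to dispatch to Bun.write when available; it is now always the Node path, which creates the destination's parent directory on demand and then defers to fs/promises. A minimal usage sketch (the output path is made up for illustration):

import path from 'path';
import { writeFile } from './lib/bun';

(async () => {
  // Parent directories that do not exist yet are created by the helper,
  // so callers can write straight into fresh output folders.
  await writeFile(path.resolve(__dirname, '../List/example.conf'), 'DOMAIN-SUFFIX,example.com\n');
})();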
@@ -5,7 +5,6 @@ import path from 'path';
 import { mkdirSync } from 'fs';
 import picocolors from 'picocolors';
 import { fastStringArrayJoin } from './misc';
-import { peek } from './bun';
 import { performance } from 'perf_hooks';
 
 const identity = (x: any) => x;
@@ -178,32 +177,23 @@ export class Cache<S = string> {
     }
 
     const cached = this.get(key);
-    let value: T;
     if (cached == null) {
       console.log(picocolors.yellow('[cache] miss'), picocolors.gray(key), picocolors.gray(`ttl: ${TTL.humanReadable(ttl)}`));
 
       const serializer = 'serializer' in opt ? opt.serializer : identity;
 
       const promise = fn();
-      const peeked = peek(promise);
-
-      if (peeked === promise) {
-        return promise.then((value) => {
-          this.set(key, serializer(value), ttl);
-          return value;
-        });
-      }
-
-      value = peeked as T;
-      this.set(key, serializer(value), ttl);
-    } else {
-      console.log(picocolors.green('[cache] hit'), picocolors.gray(key));
-
-      const deserializer = 'deserializer' in opt ? opt.deserializer : identity;
-      value = deserializer(cached);
+      return promise.then((value) => {
+        this.set(key, serializer(value), ttl);
+        return value;
+      });
     }
 
-    return value;
+    console.log(picocolors.green('[cache] hit'), picocolors.gray(key));
+
+    const deserializer = 'deserializer' in opt ? opt.deserializer : identity;
+    return deserializer(cached);
   }
 
   destroy() {
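Cache#apply previously used peek (i.e. Bun.peek) as a fast path: if fn() returned an already-settled promise, the value could be stored and returned synchronously. Without Bun that shortcut disappears, so a cache miss now always resolves through promise.then, while a hit still returns the deserialized value directly. A minimal sketch of the resulting control flow under a deliberately simplified string-only cache (getOrCompute, cacheGet and cacheSet are hypothetical names for illustration):

// Hypothetical, simplified mirror of the post-commit Cache#apply flow.
const store = new Map<string, string>();
const cacheGet = (key: string) => store.get(key) ?? null;
const cacheSet = (key: string, value: string) => { store.set(key, value); };

function getOrCompute(key: string, fn: () => Promise<string>): Promise<string> {
  const cached = cacheGet(key);
  if (cached == null) {
    // miss: always asynchronous now; no synchronous shortcut for already-settled promises
    return fn().then((value) => {
      cacheSet(key, value);
      return value;
    });
  }
  // hit: hand back the cached value without re-running fn()
  return Promise.resolve(cached);
}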
@@ -4,7 +4,6 @@ import picocolors from 'picocolors';
 import type { Span } from '../trace';
 import path from 'path';
 import fs from 'fs';
-import { sort } from './timsort';
 import { fastStringArrayJoin } from './misc';
 import { readFileByLine } from './fetch-text-by-line';
 import { writeFile } from './bun';
@@ -121,33 +120,31 @@ const sortTypeOrder: Record<string | typeof defaultSortTypeOrder, number> = {
 };
 // sort DOMAIN-SUFFIX and DOMAIN first, then DOMAIN-KEYWORD, then IP-CIDR and IP-CIDR6 if any
 export const sortRuleSet = (ruleSet: string[]) => {
-  return sort(
-    ruleSet.map((rule) => {
-      const type = collectType(rule);
-      if (!type) {
-        return [10, rule] as const;
-      }
-      if (!(type in sortTypeOrder)) {
-        return [sortTypeOrder[defaultSortTypeOrder], rule] as const;
-      }
-      if (type === 'URL-REGEX') {
-        let extraWeight = 0;
-        if (rule.includes('.+') || rule.includes('.*')) {
-          extraWeight += 10;
-        }
-        if (rule.includes('|')) {
-          extraWeight += 1;
-        }
-
-        return [
-          sortTypeOrder[type] + extraWeight,
-          rule
-        ] as const;
-      }
-      return [sortTypeOrder[type], rule] as const;
-    }),
-    (a, b) => a[0] - b[0]
-  ).map(c => c[1]);
+  return ruleSet.map((rule) => {
+    const type = collectType(rule);
+    if (!type) {
+      return [10, rule] as const;
+    }
+    if (!(type in sortTypeOrder)) {
+      return [sortTypeOrder[defaultSortTypeOrder], rule] as const;
+    }
+    if (type === 'URL-REGEX') {
+      let extraWeight = 0;
+      if (rule.includes('.+') || rule.includes('.*')) {
+        extraWeight += 10;
+      }
+      if (rule.includes('|')) {
+        extraWeight += 1;
+      }
+
+      return [
+        sortTypeOrder[type] + extraWeight,
+        rule
+      ] as const;
+    }
+    return [sortTypeOrder[type], rule] as const;
+  }).sort((a, b) => a[0] - b[0])
+    .map(c => c[1]);
 };
 
 const MARK = 'this_ruleset_is_made_by_sukkaw.ruleset.skk.moe';
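The in-house timsort existed mainly to guarantee a stable sort across runtimes; since ES2019 Array.prototype.sort is specified to be stable, so the decorate-sort-undecorate pattern above can rely on the built-in. A stand-alone sketch of the same pattern (the rule strings and weights are made up, but follow the ordering comment in the hunk):

// Weight each rule, sort by weight with the (stable) built-in sort, then unwrap.
const rules = ['DOMAIN-KEYWORD,tracker', 'IP-CIDR,10.0.0.0/8,no-resolve', 'DOMAIN-SUFFIX,example.com'];

const weightOf = (rule: string): number => {
  if (rule.startsWith('DOMAIN-SUFFIX,') || rule.startsWith('DOMAIN,')) return 1;
  if (rule.startsWith('DOMAIN-KEYWORD,')) return 2;
  return 3; // IP-CIDR / IP-CIDR6 and everything else goes last
};

const sorted = rules
  .map((rule) => [weightOf(rule), rule] as const)
  .sort((a, b) => a[0] - b[0]) // stable: rules with equal weight keep their original order
  .map(([, rule]) => rule);

console.log(sorted);
// ['DOMAIN-SUFFIX,example.com', 'DOMAIN-KEYWORD,tracker', 'IP-CIDR,10.0.0.0/8,no-resolve']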
@@ -1,12 +1,11 @@
 import fs from 'fs';
 import { Readable } from 'stream';
-import type { BunFile } from 'bun';
 import { fetchWithRetry, defaultRequestInit } from './fetch-retry';
 import type { FileHandle } from 'fs/promises';
 
 import { TextLineStream } from './text-line-transform-stream';
-import { PolyfillTextDecoderStream } from './text-decoder-stream';
-import { TextDecoderStream as NodeTextDecoderStream } from 'stream/web';
 import type { ReadableStream } from 'stream/web';
+import { TextDecoderStream } from 'stream/web';
 import { processLine } from './process-line';
 
 const enableTextLineStream = !!process.env.ENABLE_TEXT_LINE_STREAM;
@@ -39,33 +38,17 @@ async function *createTextLineAsyncIterableFromStreamSource(stream: ReadableStre
   }
 }
 
-const getReadableStream = typeof Bun !== 'undefined'
-  ? (file: string | BunFile | FileHandle): ReadableStream => {
-    if (typeof file === 'string') {
-      return Bun.file(file).stream();
-    }
-    if ('writer' in file) {
-      return file.stream();
-    }
-    return file.readableWebStream();
-  }
-  : (file: string | BunFile | FileHandle): ReadableStream => {
-    if (typeof file === 'string') {
-      return Readable.toWeb(fs.createReadStream(file /* { encoding: 'utf-8' } */));
-    }
-    if ('writer' in file) {
-      return file.stream();
-    }
-    return file.readableWebStream();
-  };
-
-// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition -- On Bun, NodeTextDecoderStream is undefined
-const TextDecoderStream = NodeTextDecoderStream ?? PolyfillTextDecoderStream;
+const getReadableStream = (file: string | FileHandle): ReadableStream => {
+  if (typeof file === 'string') {
+    return Readable.toWeb(fs.createReadStream(file /* { encoding: 'utf-8' } */));
+  }
+  return file.readableWebStream();
+};
 
 // TODO: use FileHandle.readLine()
-export const readFileByLine: ((file: string | BunFile | FileHandle) => AsyncIterable<string>) = enableTextLineStream
-  ? (file: string | BunFile | FileHandle) => getReadableStream(file).pipeThrough(new TextDecoderStream()).pipeThrough(new TextLineStream())
-  : (file: string | BunFile | FileHandle) => createTextLineAsyncIterableFromStreamSource(getReadableStream(file));
+export const readFileByLine: ((file: string | FileHandle) => AsyncIterable<string>) = enableTextLineStream
+  ? (file: string | FileHandle) => getReadableStream(file).pipeThrough(new TextDecoderStream()).pipeThrough(new TextLineStream())
+  : (file: string | FileHandle) => createTextLineAsyncIterableFromStreamSource(getReadableStream(file));
 
 const ensureResponseBody = (resp: Response) => {
   if (!resp.body) {
@@ -85,7 +68,7 @@ export function fetchRemoteTextByLine(url: string | URL) {
   return fetchWithRetry(url, defaultRequestInit).then(createReadlineInterfaceFromResponse);
 }
 
-export async function readFileIntoProcessedArray(file: string | BunFile | FileHandle) {
+export async function readFileIntoProcessedArray(file: string | FileHandle) {
   const results = [];
   for await (const line of readFileByLine(file)) {
     if (processLine(line)) {
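After this hunk, getReadableStream only needs Node primitives (Readable.toWeb over fs.createReadStream for paths, readableWebStream() for FileHandles), and readFileByLine stays an AsyncIterable of lines either way. A minimal usage sketch (the conf path is taken from elsewhere in this commit; processLine is assumed to filter out comments and empty lines):

import path from 'path';
import { readFileByLine, readFileIntoProcessedArray } from './lib/fetch-text-by-line';

(async () => {
  const conf = path.resolve(__dirname, '../Source/domainset/reject_sukka.conf');

  // Stream the file line by line without reading it whole into memory.
  for await (const line of readFileByLine(conf)) {
    // ... inspect each line here
  }

  // Or collect only the lines that pass processLine().
  const entries = await readFileIntoProcessedArray(conf);
  console.log(entries.length);
})();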
@@ -4,13 +4,11 @@ import { createMemoizedPromise } from './memo-promise';
 import { getPublicSuffixListTextPromise } from './download-publicsuffixlist';
 import { fileURLToPath } from 'url';
 
-const customFetch = typeof Bun !== 'undefined'
-  ? (url: string | URL) => Promise.resolve(Bun.file(url))
-  : async (url: string | URL) => {
-    const filePath = fileURLToPath(url);
-    const file = await fsp.readFile(filePath);
-    return new Blob([file]) as any;
-  };
+const customFetch = async (url: string | URL) => {
+  const filePath = fileURLToPath(url);
+  const file = await fsp.readFile(filePath);
+  return new Blob([file]) as any;
+};
 
 export const getGorhillPublicSuffixPromise = createMemoizedPromise(async () => {
   const [publicSuffixListDat, { default: gorhill }] = await Promise.all([
@@ -2,7 +2,6 @@
 // (since it is hashes based). But the result is still deterministic, which is
 // enough when sorting.
 import * as tldts from 'tldts-experimental';
-import { sort } from './timsort';
 import { looseTldtsOpt } from '../constants/loose-tldts-opt';
 
 export const compare = (a: string, b: string) => {
@@ -58,5 +57,5 @@ export const sortDomains = (
     return t;
   };
 
-  return sort(inputs, sorter);
+  return inputs.sort(sorter);
 };
 
@@ -2,6 +2,8 @@
 // This module is browser compatible.
 // Modified by Sukka (https://skk.moe) to increase compatibility and performance with Bun.
 
+import { TransformStream } from 'stream/web';
+
 interface TextLineStreamOptions {
   /** Allow splitting by solo \r */
   allowCR?: boolean
@ -1,956 +0,0 @@
|
||||
type Comparator<T> = (a: T, b: T) => number;
|
||||
|
||||
/**
|
||||
* Default minimum size of a run.
|
||||
*/
|
||||
const DEFAULT_MIN_MERGE = 32;
|
||||
|
||||
/**
|
||||
* Minimum ordered subsequece required to do galloping.
|
||||
*/
|
||||
const DEFAULT_MIN_GALLOPING = 7;
|
||||
|
||||
/**
|
||||
* Default tmp storage length. Can increase depending on the size of the
|
||||
* smallest run to merge.
|
||||
*/
|
||||
const DEFAULT_TMP_STORAGE_LENGTH = 256;
|
||||
|
||||
/**
|
||||
* Pre-computed powers of 10 for efficient lexicographic comparison of
|
||||
* small integers.
|
||||
*/
|
||||
const POWERS_OF_TEN = [1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9];
|
||||
|
||||
/**
|
||||
* Estimate the logarithm base 10 of a small integer.
|
||||
*
|
||||
* @param x - The integer to estimate the logarithm of.
|
||||
* @return {number} - The estimated logarithm of the integer.
|
||||
*/
|
||||
function log10(x: number): number {
|
||||
if (x < 1e5) {
|
||||
if (x < 1e2) {
|
||||
return x < 1e1 ? 0 : 1;
|
||||
}
|
||||
|
||||
if (x < 1e4) {
|
||||
return x < 1e3 ? 2 : 3;
|
||||
}
|
||||
|
||||
return 4;
|
||||
}
|
||||
|
||||
if (x < 1e7) {
|
||||
return x < 1e6 ? 5 : 6;
|
||||
}
|
||||
|
||||
if (x < 1e9) {
|
||||
return x < 1e8 ? 7 : 8;
|
||||
}
|
||||
|
||||
return 9;
|
||||
}
|
||||
|
||||
/**
|
||||
* Default alphabetical comparison of items.
|
||||
*
|
||||
* @param a - First element to compare.
|
||||
* @param b - Second element to compare.
|
||||
* @return - A positive number if a.toString() > b.toString(), a
|
||||
* negative number if .toString() < b.toString(), 0 otherwise.
|
||||
*/
|
||||
function alphabeticalCompare(a: any, b: any): number {
|
||||
if (a === b) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (~~a === a && ~~b === b) {
|
||||
if (a === 0 || b === 0) {
|
||||
return a < b ? -1 : 1;
|
||||
}
|
||||
|
||||
if (a < 0 || b < 0) {
|
||||
if (b >= 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (a >= 0) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
a = -a;
|
||||
b = -b;
|
||||
}
|
||||
|
||||
const al = log10(a);
|
||||
const bl = log10(b);
|
||||
|
||||
let t = 0;
|
||||
|
||||
if (al < bl) {
|
||||
a *= POWERS_OF_TEN[bl - al - 1];
|
||||
b /= 10;
|
||||
t = -1;
|
||||
} else if (al > bl) {
|
||||
b *= POWERS_OF_TEN[al - bl - 1];
|
||||
a /= 10;
|
||||
t = 1;
|
||||
}
|
||||
|
||||
if (a === b) {
|
||||
return t;
|
||||
}
|
||||
|
||||
return a < b ? -1 : 1;
|
||||
}
|
||||
|
||||
const aStr = String(a);
|
||||
const bStr = String(b);
|
||||
|
||||
if (aStr === bStr) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return aStr < bStr ? -1 : 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute minimum run length for TimSort
|
||||
*
|
||||
* @param n - The size of the array to sort.
|
||||
*/
|
||||
function minRunLength(n: number) {
|
||||
let r = 0;
|
||||
|
||||
while (n >= DEFAULT_MIN_MERGE) {
|
||||
r |= (n & 1);
|
||||
n >>= 1;
|
||||
}
|
||||
|
||||
return n + r;
|
||||
}
|
||||
|
||||
/**
|
||||
* Counts the length of a monotonically ascending or strictly monotonically
|
||||
* descending sequence (run) starting at array[lo] in the range [lo, hi). If
|
||||
* the run is descending it is made ascending.
|
||||
*
|
||||
* @param array - The array to reverse.
|
||||
* @param lo - First element in the range (inclusive).
|
||||
* @param hi - Last element in the range.
|
||||
* @param compare - Item comparison function.
|
||||
* @return - The length of the run.
|
||||
*/
|
||||
function makeAscendingRun<T>(array: T[], lo: number, hi: number, compare: Comparator<T>): number {
|
||||
let runHi = lo + 1;
|
||||
|
||||
if (runHi === hi) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Descending
|
||||
if (compare(array[runHi++], array[lo]) < 0) {
|
||||
while (runHi < hi && compare(array[runHi], array[runHi - 1]) < 0) {
|
||||
runHi++;
|
||||
}
|
||||
|
||||
reverseRun(array, lo, runHi);
|
||||
// Ascending
|
||||
} else {
|
||||
while (runHi < hi && compare(array[runHi], array[runHi - 1]) >= 0) {
|
||||
runHi++;
|
||||
}
|
||||
}
|
||||
|
||||
return runHi - lo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reverse an array in the range [lo, hi).
|
||||
*
|
||||
* @param array - The array to reverse.
|
||||
* @param lo - First element in the range (inclusive).
|
||||
* @param hi - Last element in the range.
|
||||
*/
|
||||
function reverseRun<T>(array: T[], lo: number, hi: number) {
|
||||
hi--;
|
||||
|
||||
while (lo < hi) {
|
||||
const t = array[lo];
|
||||
array[lo++] = array[hi];
|
||||
array[hi--] = t;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform the binary sort of the array in the range [lo, hi) where start is
|
||||
* the first element possibly out of order.
|
||||
*
|
||||
* @param array - The array to sort.
|
||||
* @param lo - First element in the range (inclusive).
|
||||
* @param hi - Last element in the range.
|
||||
* @param start - First element possibly out of order.
|
||||
* @param compare - Item comparison function.
|
||||
*/
|
||||
function binaryInsertionSort<T>(array: T[], lo: number, hi: number, start: number, compare: Comparator<T>) {
|
||||
if (start === lo) {
|
||||
start++;
|
||||
}
|
||||
|
||||
for (; start < hi; start++) {
|
||||
const pivot = array[start];
|
||||
|
||||
// Ranges of the array where pivot belongs
|
||||
let left = lo;
|
||||
let right = start;
|
||||
|
||||
/*
|
||||
* pivot >= array[i] for i in [lo, left)
|
||||
* pivot < array[i] for i in in [right, start)
|
||||
*/
|
||||
while (left < right) {
|
||||
const mid = (left + right) >>> 1;
|
||||
|
||||
if (compare(pivot, array[mid]) < 0) {
|
||||
right = mid;
|
||||
} else {
|
||||
left = mid + 1;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Move elements right to make room for the pivot. If there are elements
|
||||
* equal to pivot, left points to the first slot after them: this is also
|
||||
* a reason for which TimSort is stable
|
||||
*/
|
||||
let n = start - left;
|
||||
// Switch is just an optimization for small arrays
|
||||
switch (n) {
|
||||
case 3:
|
||||
array[left + 3] = array[left + 2];
|
||||
/* falls through */
|
||||
case 2:
|
||||
array[left + 2] = array[left + 1];
|
||||
/* falls through */
|
||||
case 1:
|
||||
array[left + 1] = array[left];
|
||||
break;
|
||||
default:
|
||||
while (n > 0) {
|
||||
array[left + n] = array[left + n - 1];
|
||||
n--;
|
||||
}
|
||||
}
|
||||
|
||||
array[left] = pivot;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the position at which to insert a value in a sorted range. If the range
|
||||
* contains elements equal to the value the leftmost element index is returned
|
||||
* (for stability).
|
||||
*
|
||||
* @param value - Value to insert.
|
||||
* @param array - The array in which to insert value.
|
||||
* @param start - First element in the range.
|
||||
* @param length - Length of the range.
|
||||
* @param hint - The index at which to begin the search.
|
||||
* @param compare - Item comparison function.
|
||||
* @return - The index where to insert value.
|
||||
*/
|
||||
function gallopLeft<T>(value: T, array: T[], start: number, length: number, hint: number, compare: Comparator<T>): number {
|
||||
let lastOffset = 0;
|
||||
let maxOffset = 0;
|
||||
let offset = 1;
|
||||
|
||||
if (compare(value, array[start + hint]) > 0) {
|
||||
maxOffset = length - hint;
|
||||
|
||||
while (offset < maxOffset && compare(value, array[start + hint + offset]) > 0) {
|
||||
lastOffset = offset;
|
||||
offset = (offset << 1) + 1;
|
||||
|
||||
if (offset <= 0) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
}
|
||||
|
||||
if (offset > maxOffset) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
|
||||
// Make offsets relative to start
|
||||
lastOffset += hint;
|
||||
offset += hint;
|
||||
|
||||
// value <= array[start + hint]
|
||||
} else {
|
||||
maxOffset = hint + 1;
|
||||
while (offset < maxOffset && compare(value, array[start + hint - offset]) <= 0) {
|
||||
lastOffset = offset;
|
||||
offset = (offset << 1) + 1;
|
||||
|
||||
if (offset <= 0) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
}
|
||||
if (offset > maxOffset) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
|
||||
// Make offsets relative to start
|
||||
const tmp = lastOffset;
|
||||
lastOffset = hint - offset;
|
||||
offset = hint - tmp;
|
||||
}
|
||||
|
||||
/*
|
||||
* Now array[start+lastOffset] < value <= array[start+offset], so value
|
||||
* belongs somewhere in the range (start + lastOffset, start + offset]. Do a
|
||||
* binary search, with invariant array[start + lastOffset - 1] < value <=
|
||||
* array[start + offset].
|
||||
*/
|
||||
lastOffset++;
|
||||
while (lastOffset < offset) {
|
||||
const m = lastOffset + ((offset - lastOffset) >>> 1);
|
||||
|
||||
if (compare(value, array[start + m]) > 0) {
|
||||
lastOffset = m + 1;
|
||||
} else {
|
||||
offset = m;
|
||||
}
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the position at which to insert a value in a sorted range. If the range
|
||||
* contains elements equal to the value the rightmost element index is returned
|
||||
* (for stability).
|
||||
*
|
||||
* @param value - Value to insert.
|
||||
* @param array - The array in which to insert value.
|
||||
* @param start - First element in the range.
|
||||
* @param length - Length of the range.
|
||||
* @param hint - The index at which to begin the search.
|
||||
* @param compare - Item comparison function.
|
||||
* @return - The index where to insert value.
|
||||
*/
|
||||
function gallopRight<T>(value: T, array: T[], start: number, length: number, hint: number, compare: Comparator<T>): number {
|
||||
let lastOffset = 0;
|
||||
let maxOffset = 0;
|
||||
let offset = 1;
|
||||
|
||||
if (compare(value, array[start + hint]) < 0) {
|
||||
maxOffset = hint + 1;
|
||||
|
||||
while (offset < maxOffset && compare(value, array[start + hint - offset]) < 0) {
|
||||
lastOffset = offset;
|
||||
offset = (offset << 1) + 1;
|
||||
|
||||
if (offset <= 0) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
}
|
||||
|
||||
if (offset > maxOffset) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
|
||||
// Make offsets relative to start
|
||||
const tmp = lastOffset;
|
||||
lastOffset = hint - offset;
|
||||
offset = hint - tmp;
|
||||
|
||||
// value >= array[start + hint]
|
||||
} else {
|
||||
maxOffset = length - hint;
|
||||
|
||||
while (offset < maxOffset && compare(value, array[start + hint + offset]) >= 0) {
|
||||
lastOffset = offset;
|
||||
offset = (offset << 1) + 1;
|
||||
|
||||
if (offset <= 0) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
}
|
||||
|
||||
if (offset > maxOffset) {
|
||||
offset = maxOffset;
|
||||
}
|
||||
|
||||
// Make offsets relative to start
|
||||
lastOffset += hint;
|
||||
offset += hint;
|
||||
}
|
||||
|
||||
/*
|
||||
* Now array[start+lastOffset] < value <= array[start+offset], so value
|
||||
* belongs somewhere in the range (start + lastOffset, start + offset]. Do a
|
||||
* binary search, with invariant array[start + lastOffset - 1] < value <=
|
||||
* array[start + offset].
|
||||
*/
|
||||
lastOffset++;
|
||||
|
||||
while (lastOffset < offset) {
|
||||
const m = lastOffset + ((offset - lastOffset) >>> 1);
|
||||
|
||||
if (compare(value, array[start + m]) < 0) {
|
||||
offset = m;
|
||||
} else {
|
||||
lastOffset = m + 1;
|
||||
}
|
||||
}
|
||||
|
||||
return offset;
|
||||
}
|
||||
|
||||
class TimSort<T> {
|
||||
tmp: T[];
|
||||
minGallop = DEFAULT_MIN_GALLOPING;
|
||||
length = 0;
|
||||
tmpStorageLength = DEFAULT_TMP_STORAGE_LENGTH;
|
||||
stackLength = 0;
|
||||
runStart: number[];
|
||||
runLength: number[];
|
||||
stackSize = 0;
|
||||
|
||||
constructor(public array: T[], public compare: Comparator<T>) {
|
||||
this.length = array.length;
|
||||
|
||||
if (this.length < 2 * DEFAULT_TMP_STORAGE_LENGTH) {
|
||||
this.tmpStorageLength = this.length >>> 1;
|
||||
}
|
||||
|
||||
this.tmp = new Array(this.tmpStorageLength);
|
||||
|
||||
if (this.length < 120) {
|
||||
this.stackLength = 5;
|
||||
} else if (this.length < 1542) {
|
||||
this.stackLength = 10;
|
||||
} else if (this.length < 119151) {
|
||||
this.stackLength = 19;
|
||||
} else {
|
||||
this.stackLength = 40;
|
||||
}
|
||||
|
||||
this.runStart = new Array(this.stackLength);
|
||||
this.runLength = new Array(this.stackLength);
|
||||
}
|
||||
|
||||
/**
|
||||
* Push a new run on TimSort's stack.
|
||||
*
|
||||
* @param runStart - Start index of the run in the original array.
|
||||
* @param runLength - Length of the run;
|
||||
*/
|
||||
pushRun(runStart: number, runLength: number) {
|
||||
this.runStart[this.stackSize] = runStart;
|
||||
this.runLength[this.stackSize] = runLength;
|
||||
this.stackSize += 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge runs on TimSort's stack so that the following holds for all i:
|
||||
* 1) runLength[i - 3] > runLength[i - 2] + runLength[i - 1]
|
||||
* 2) runLength[i - 2] > runLength[i - 1]
|
||||
*/
|
||||
mergeRuns() {
|
||||
while (this.stackSize > 1) {
|
||||
let n = this.stackSize - 2;
|
||||
|
||||
if ((n >= 1
|
||||
&& this.runLength[n - 1] <= this.runLength[n] + this.runLength[n + 1])
|
||||
|| (n >= 2
|
||||
&& this.runLength[n - 2] <= this.runLength[n] + this.runLength[n - 1])) {
|
||||
if (this.runLength[n - 1] < this.runLength[n + 1]) {
|
||||
n--;
|
||||
}
|
||||
} else if (this.runLength[n] > this.runLength[n + 1]) {
|
||||
break;
|
||||
}
|
||||
this.mergeAt(n);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge all runs on TimSort's stack until only one remains.
|
||||
*/
|
||||
forceMergeRuns() {
|
||||
while (this.stackSize > 1) {
|
||||
let n = this.stackSize - 2;
|
||||
|
||||
if (n > 0 && this.runLength[n - 1] < this.runLength[n + 1]) {
|
||||
n--;
|
||||
}
|
||||
|
||||
this.mergeAt(n);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge the runs on the stack at positions i and i+1. Must be always be called
|
||||
* with i=stackSize-2 or i=stackSize-3 (that is, we merge on top of the stack).
|
||||
*
|
||||
* @param i - Index of the run to merge in TimSort's stack.
|
||||
*/
|
||||
mergeAt(i: number) {
|
||||
const compare = this.compare;
|
||||
const array = this.array;
|
||||
|
||||
let start1 = this.runStart[i];
|
||||
let length1 = this.runLength[i];
|
||||
const start2 = this.runStart[i + 1];
|
||||
let length2 = this.runLength[i + 1];
|
||||
|
||||
this.runLength[i] = length1 + length2;
|
||||
|
||||
if (i === this.stackSize - 3) {
|
||||
this.runStart[i + 1] = this.runStart[i + 2];
|
||||
this.runLength[i + 1] = this.runLength[i + 2];
|
||||
}
|
||||
|
||||
this.stackSize--;
|
||||
|
||||
/*
|
||||
* Find where the first element in the second run goes in run1. Previous
|
||||
* elements in run1 are already in place
|
||||
*/
|
||||
const k = gallopRight(array[start2], array, start1, length1, 0, compare);
|
||||
start1 += k;
|
||||
length1 -= k;
|
||||
|
||||
if (length1 === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
* Find where the last element in the first run goes in run2. Next elements
|
||||
* in run2 are already in place
|
||||
*/
|
||||
length2 = gallopLeft(array[start1 + length1 - 1], array, start2, length2, length2 - 1, compare);
|
||||
|
||||
if (length2 === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
* Merge remaining runs. A tmp array with length = min(length1, length2) is
|
||||
* used
|
||||
*/
|
||||
if (length1 <= length2) {
|
||||
this.mergeLow(start1, length1, start2, length2);
|
||||
} else {
|
||||
this.mergeHigh(start1, length1, start2, length2);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge two adjacent runs in a stable way. The runs must be such that the
|
||||
* first element of run1 is bigger than the first element in run2 and the
|
||||
* last element of run1 is greater than all the elements in run2.
|
||||
* The method should be called when run1.length <= run2.length as it uses
|
||||
* TimSort temporary array to store run1. Use mergeHigh if run1.length >
|
||||
* run2.length.
|
||||
*
|
||||
* @param start1 - First element in run1.
|
||||
* @param length1 - Length of run1.
|
||||
* @param start2 - First element in run2.
|
||||
* @param length2 - Length of run2.
|
||||
*/
|
||||
mergeLow(start1: number, length1: number, start2: number, length2: number) {
|
||||
const compare = this.compare;
|
||||
const array = this.array;
|
||||
const tmp = this.tmp;
|
||||
let i = 0;
|
||||
|
||||
for (i = 0; i < length1; i++) {
|
||||
tmp[i] = array[start1 + i];
|
||||
}
|
||||
|
||||
let cursor1 = 0;
|
||||
let cursor2 = start2;
|
||||
let dest = start1;
|
||||
|
||||
array[dest++] = array[cursor2++];
|
||||
|
||||
if (--length2 === 0) {
|
||||
for (i = 0; i < length1; i++) {
|
||||
array[dest + i] = tmp[cursor1 + i];
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (length1 === 1) {
|
||||
for (i = 0; i < length2; i++) {
|
||||
array[dest + i] = array[cursor2 + i];
|
||||
}
|
||||
array[dest + length2] = tmp[cursor1];
|
||||
return;
|
||||
}
|
||||
|
||||
let minGallop = this.minGallop;
|
||||
|
||||
while (true) {
|
||||
let count1 = 0;
|
||||
let count2 = 0;
|
||||
let exit = false;
|
||||
|
||||
do {
|
||||
if (compare(array[cursor2], tmp[cursor1]) < 0) {
|
||||
array[dest++] = array[cursor2++];
|
||||
count2++;
|
||||
count1 = 0;
|
||||
|
||||
if (--length2 === 0) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
array[dest++] = tmp[cursor1++];
|
||||
count1++;
|
||||
count2 = 0;
|
||||
if (--length1 === 1) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} while ((count1 | count2) < minGallop);
|
||||
|
||||
if (exit) {
|
||||
break;
|
||||
}
|
||||
|
||||
do {
|
||||
count1 = gallopRight(array[cursor2], tmp, cursor1, length1, 0, compare);
|
||||
|
||||
if (count1 !== 0) {
|
||||
for (i = 0; i < count1; i++) {
|
||||
array[dest + i] = tmp[cursor1 + i];
|
||||
}
|
||||
|
||||
dest += count1;
|
||||
cursor1 += count1;
|
||||
length1 -= count1;
|
||||
if (length1 <= 1) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
array[dest++] = array[cursor2++];
|
||||
|
||||
if (--length2 === 0) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
|
||||
count2 = gallopLeft(tmp[cursor1], array, cursor2, length2, 0, compare);
|
||||
|
||||
if (count2 !== 0) {
|
||||
for (i = 0; i < count2; i++) {
|
||||
array[dest + i] = array[cursor2 + i];
|
||||
}
|
||||
|
||||
dest += count2;
|
||||
cursor2 += count2;
|
||||
length2 -= count2;
|
||||
|
||||
if (length2 === 0) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
array[dest++] = tmp[cursor1++];
|
||||
|
||||
if (--length1 === 1) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
|
||||
minGallop--;
|
||||
} while (count1 >= DEFAULT_MIN_GALLOPING || count2 >= DEFAULT_MIN_GALLOPING);
|
||||
|
||||
if (exit) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (minGallop < 0) {
|
||||
minGallop = 0;
|
||||
}
|
||||
|
||||
minGallop += 2;
|
||||
}
|
||||
|
||||
this.minGallop = minGallop;
|
||||
|
||||
if (minGallop < 1) {
|
||||
this.minGallop = 1;
|
||||
}
|
||||
|
||||
if (length1 === 1) {
|
||||
for (i = 0; i < length2; i++) {
|
||||
array[dest + i] = array[cursor2 + i];
|
||||
}
|
||||
array[dest + length2] = tmp[cursor1];
|
||||
} else if (length1 === 0) {
|
||||
// do nothing
|
||||
} else {
|
||||
for (i = 0; i < length1; i++) {
|
||||
array[dest + i] = tmp[cursor1 + i];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge two adjacent runs in a stable way. The runs must be such that the
|
||||
* first element of run1 is bigger than the first element in run2 and the
|
||||
* last element of run1 is greater than all the elements in run2.
|
||||
* The method should be called when run1.length > run2.length as it uses
|
||||
* TimSort temporary array to store run2. Use mergeLow if run1.length <=
|
||||
* run2.length.
|
||||
*
|
||||
* @param start1 - First element in run1.
|
||||
* @param length1 - Length of run1.
|
||||
* @param start2 - First element in run2.
|
||||
* @param length2 - Length of run2.
|
||||
*/
|
||||
mergeHigh(start1: number, length1: number, start2: number, length2: number) {
|
||||
const compare = this.compare;
|
||||
const array = this.array;
|
||||
const tmp = this.tmp;
|
||||
let i = 0;
|
||||
|
||||
for (i = 0; i < length2; i++) {
|
||||
tmp[i] = array[start2 + i];
|
||||
}
|
||||
|
||||
let cursor1 = start1 + length1 - 1;
|
||||
let cursor2 = length2 - 1;
|
||||
let dest = start2 + length2 - 1;
|
||||
let customCursor = 0;
|
||||
let customDest = 0;
|
||||
|
||||
array[dest--] = array[cursor1--];
|
||||
|
||||
if (--length1 === 0) {
|
||||
customCursor = dest - (length2 - 1);
|
||||
|
||||
for (i = 0; i < length2; i++) {
|
||||
array[customCursor + i] = tmp[i];
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (length2 === 1) {
|
||||
dest -= length1;
|
||||
cursor1 -= length1;
|
||||
customDest = dest + 1;
|
||||
customCursor = cursor1 + 1;
|
||||
|
||||
for (i = length1 - 1; i >= 0; i--) {
|
||||
array[customDest + i] = array[customCursor + i];
|
||||
}
|
||||
|
||||
array[dest] = tmp[cursor2];
|
||||
return;
|
||||
}
|
||||
|
||||
let minGallop = this.minGallop;
|
||||
|
||||
while (true) {
|
||||
let count1 = 0;
|
||||
let count2 = 0;
|
||||
let exit = false;
|
||||
|
||||
do {
|
||||
if (compare(tmp[cursor2], array[cursor1]) < 0) {
|
||||
array[dest--] = array[cursor1--];
|
||||
count1++;
|
||||
count2 = 0;
|
||||
if (--length1 === 0) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
array[dest--] = tmp[cursor2--];
|
||||
count2++;
|
||||
count1 = 0;
|
||||
if (--length2 === 1) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} while ((count1 | count2) < minGallop);
|
||||
|
||||
if (exit) {
|
||||
break;
|
||||
}
|
||||
|
||||
do {
|
||||
count1 = length1 - gallopRight(tmp[cursor2], array, start1, length1, length1 - 1, compare);
|
||||
|
||||
if (count1 !== 0) {
|
||||
dest -= count1;
|
||||
cursor1 -= count1;
|
||||
length1 -= count1;
|
||||
customDest = dest + 1;
|
||||
customCursor = cursor1 + 1;
|
||||
|
||||
for (i = count1 - 1; i >= 0; i--) {
|
||||
array[customDest + i] = array[customCursor + i];
|
||||
}
|
||||
|
||||
if (length1 === 0) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
array[dest--] = tmp[cursor2--];
|
||||
|
||||
if (--length2 === 1) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
|
||||
count2 = length2 - gallopLeft(array[cursor1], tmp, 0, length2, length2 - 1, compare);
|
||||
|
||||
if (count2 !== 0) {
|
||||
dest -= count2;
|
||||
cursor2 -= count2;
|
||||
length2 -= count2;
|
||||
customDest = dest + 1;
|
||||
customCursor = cursor2 + 1;
|
||||
|
||||
for (i = 0; i < count2; i++) {
|
||||
array[customDest + i] = tmp[customCursor + i];
|
||||
}
|
||||
|
||||
if (length2 <= 1) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
array[dest--] = array[cursor1--];
|
||||
|
||||
if (--length1 === 0) {
|
||||
exit = true;
|
||||
break;
|
||||
}
|
||||
|
||||
minGallop--;
|
||||
} while (count1 >= DEFAULT_MIN_GALLOPING || count2 >= DEFAULT_MIN_GALLOPING);
|
||||
|
||||
if (exit) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (minGallop < 0) {
|
||||
minGallop = 0;
|
||||
}
|
||||
|
||||
minGallop += 2;
|
||||
}
|
||||
|
||||
this.minGallop = minGallop;
|
||||
|
||||
if (minGallop < 1) {
|
||||
this.minGallop = 1;
|
||||
}
|
||||
|
||||
if (length2 === 1) {
|
||||
dest -= length1;
|
||||
cursor1 -= length1;
|
||||
customDest = dest + 1;
|
||||
customCursor = cursor1 + 1;
|
||||
|
||||
for (i = length1 - 1; i >= 0; i--) {
|
||||
array[customDest + i] = array[customCursor + i];
|
||||
}
|
||||
|
||||
array[dest] = tmp[cursor2];
|
||||
} else if (length2 === 0) {
|
||||
// do nothing
|
||||
} else {
|
||||
customCursor = dest - (length2 - 1);
|
||||
for (i = 0; i < length2; i++) {
|
||||
array[customCursor + i] = tmp[i];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sort an array in the range [lo, hi) using TimSort.
|
||||
*
|
||||
* @param array - The array to sort.
|
||||
* @param compare - Item comparison function. Default is
|
||||
* alphabetical
|
||||
* @param lo - First element in the range (inclusive).
|
||||
* @param hi - Last element in the range.
|
||||
* comparator.
|
||||
*/
|
||||
export function sort<T>(array: T[], compare: Comparator<T> | undefined = alphabeticalCompare, lo = 0, hi: number = array.length): T[] {
|
||||
// if (!Array.isArray(array)) {
|
||||
// throw new TypeError('Can only sort arrays');
|
||||
// }
|
||||
|
||||
/*
|
||||
* Handle the case where a comparison function is not provided. We do
|
||||
* lexicographic sorting
|
||||
*/
|
||||
if (typeof compare !== 'function') {
|
||||
hi = lo;
|
||||
lo = compare;
|
||||
compare = alphabeticalCompare;
|
||||
}
|
||||
|
||||
let remaining = hi - lo;
|
||||
|
||||
// The array is already sorted
|
||||
if (remaining < 2) {
|
||||
return array;
|
||||
}
|
||||
|
||||
let runLength = 0;
|
||||
// On small arrays binary sort can be used directly
|
||||
if (remaining < DEFAULT_MIN_MERGE) {
|
||||
runLength = makeAscendingRun(array, lo, hi, compare);
|
||||
binaryInsertionSort(array, lo, hi, lo + runLength, compare);
|
||||
return array;
|
||||
}
|
||||
|
||||
const ts = new TimSort(array, compare);
|
||||
|
||||
const minRun = minRunLength(remaining);
|
||||
|
||||
do {
|
||||
runLength = makeAscendingRun(array, lo, hi, compare);
|
||||
if (runLength < minRun) {
|
||||
let force = remaining;
|
||||
if (force > minRun) {
|
||||
force = minRun;
|
||||
}
|
||||
|
||||
binaryInsertionSort(array, lo, lo + force, lo + runLength, compare);
|
||||
runLength = force;
|
||||
}
|
||||
// Push new run and merge if necessary
|
||||
ts.pushRun(lo, runLength);
|
||||
ts.mergeRuns();
|
||||
|
||||
// Go find next run
|
||||
remaining -= runLength;
|
||||
lo += runLength;
|
||||
} while (remaining !== 0);
|
||||
|
||||
// Force merging of remaining runs
|
||||
ts.forceMergeRuns();
|
||||
|
||||
return array;
|
||||
}
|
||||
@@ -5,8 +5,6 @@
 import { fastStringArrayJoin } from './misc';
 import { inspect } from 'util';
 
-// const { Error, Bun, JSON, Symbol } = globalThis;
-
 const noop = () => { /** noop */ };
 
 type TrieNode = [
@@ -2,12 +2,7 @@
 
 module.exports = require('eslint-config-sukka').sukka({
   js: {
-    disableNoConsoleInCLI: ['Build/**'],
-    env: {
-      customGlobals: {
-        Bun: 'readonly'
-      }
-    }
+    disableNoConsoleInCLI: ['Build/**']
   },
   node: true,
   ts: true
package.json (11 changed lines)
@@ -7,11 +7,12 @@
     "type": "git",
     "url": "git+https://github.com/SukkaW/Surge.git"
   },
   "type": "commonjs",
   "scripts": {
-    "build": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true node -r @swc-node/register ./Build/index.ts",
-    "build-bun": "bun ./Build/index.ts",
-    "build-bun-stream": "ENABLE_TEXT_LINE_STREAM=true bun ./Build/index.ts",
-    "build-profile": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true dexnode -r @swc-node/register ./Build/index.ts",
+    "node": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true node -r @swc-node/register",
+    "dexnode": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true dexnode -r @swc-node/register",
+    "build": "pnpm run node ./Build/index.ts",
+    "build-profile": "pnpm run dexnode -r @swc-node/register ./Build/index.ts",
     "lint": "eslint --format=sukka ."
   },
   "author": "",
@@ -43,10 +44,8 @@
     "@swc/core": "^1.7.0",
     "@types/async-retry": "^1.4.8",
     "@types/better-sqlite3": "^7.6.11",
-    "@types/bun": "^1.1.6",
     "@types/punycode": "^2.1.4",
     "@types/tar-stream": "^3.1.3",
-    "bun-types": "^1.1.20",
     "eslint": "^9.7.0",
     "eslint-config-sukka": "^6.1.6",
     "eslint-formatter-sukka": "^6.1.6",
pnpm-lock.yaml (generated, 53 changed lines)
@ -84,18 +84,12 @@ importers:
|
||||
'@types/better-sqlite3':
|
||||
specifier: ^7.6.11
|
||||
version: 7.6.11
|
||||
'@types/bun':
|
||||
specifier: ^1.1.6
|
||||
version: 1.1.6
|
||||
'@types/punycode':
|
||||
specifier: ^2.1.4
|
||||
version: 2.1.4
|
||||
'@types/tar-stream':
|
||||
specifier: ^3.1.3
|
||||
version: 3.1.3
|
||||
bun-types:
|
||||
specifier: ^1.1.20
|
||||
version: 1.1.20
|
||||
eslint:
|
||||
specifier: ^9.7.0
|
||||
version: 9.7.0
|
||||
@ -394,9 +388,6 @@ packages:
|
||||
'@types/better-sqlite3@7.6.11':
|
||||
resolution: {integrity: sha512-i8KcD3PgGtGBLl3+mMYA8PdKkButvPyARxA7IQAd6qeslht13qxb1zzO8dRCtE7U3IoJS782zDBAeoKiM695kg==}
|
||||
|
||||
'@types/bun@1.1.6':
|
||||
resolution: {integrity: sha512-uJgKjTdX0GkWEHZzQzFsJkWp5+43ZS7HC8sZPFnOwnSo1AsNl2q9o2bFeS23disNDqbggEgyFkKCHl/w8iZsMA==}
|
||||
|
||||
'@types/chrome@0.0.268':
|
||||
resolution: {integrity: sha512-7N1QH9buudSJ7sI8Pe4mBHJr5oZ48s0hcanI9w3wgijAlv1OZNUZve9JR4x42dn5lJ5Sm87V1JNfnoh10EnQlA==}
|
||||
|
||||
@ -421,9 +412,6 @@ packages:
|
||||
'@types/json-schema@7.0.15':
|
||||
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
|
||||
|
||||
'@types/node@20.12.14':
|
||||
resolution: {integrity: sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg==}
|
||||
|
||||
'@types/node@20.14.11':
|
||||
resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==}
|
||||
|
||||
@ -436,9 +424,6 @@ packages:
|
||||
'@types/tar-stream@3.1.3':
|
||||
resolution: {integrity: sha512-Zbnx4wpkWBMBSu5CytMbrT5ZpMiF55qgM+EpHzR4yIDu7mv52cej8hTkOc6K+LzpkOAbxwn/m7j3iO+/l42YkQ==}
|
||||
|
||||
'@types/ws@8.5.11':
|
||||
resolution: {integrity: sha512-4+q7P5h3SpJxaBft0Dzpbr6lmMaqh0Jr2tbhJZ/luAwvD7ohSCniYkwz/pLxuT2h0EOa6QADgJj1Ko+TzRfZ+w==}
|
||||
|
||||
'@typescript-eslint/eslint-plugin@8.0.0-alpha.45':
|
||||
resolution: {integrity: sha512-h+pGHKWu+i5D6BmzpggG8bDj/fVVhxzQLE2CPsKtH1ab0QvUz+eyT/lIfz0xs8NF/lQS7tmlU5AYnQdKe1yAQw==}
|
||||
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
|
||||
@ -481,10 +466,6 @@ packages:
|
||||
resolution: {integrity: sha512-a29Ir0EbyKTKHnZWbNsrc/gqfIBqYPwj3F2M+jWE/9bqfEHg0AMtXzkbUkOG6QgEScxh2+Pz9OXe11jHDnHR7A==}
|
||||
engines: {node: ^18.18.0 || >=20.0.0}
|
||||
|
||||
'@typescript-eslint/types@8.0.0-alpha.36':
|
||||
resolution: {integrity: sha512-D+w5uE8Y83K/P5VQZyKKi4pwTL2YkWOwtQOVJQI38Rp8f3pmY+Jmcps3wkSFSJK8wifTlvoHwwIBf1FsdCW/EA==}
|
||||
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
|
||||
|
||||
'@typescript-eslint/types@8.0.0-alpha.45':
|
||||
resolution: {integrity: sha512-yjTlmcSnkFV8IoqE0vinmWo+fl7TjkaGyGX/g9gKN/b2IO8g+AimB7BhilmlBqvZupvo2AfiHqcnZEVhQAXI8w==}
|
||||
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
|
||||
@ -605,12 +586,6 @@ packages:
|
||||
buffer@5.7.1:
|
||||
resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==}
|
||||
|
||||
bun-types@1.1.17:
|
||||
resolution: {integrity: sha512-Z4+OplcSd/YZq7ZsrfD00DKJeCwuNY96a1IDJyR73+cTBaFIS7SC6LhpY/W3AMEXO9iYq5NJ58WAwnwL1p5vKg==}
|
||||
|
||||
bun-types@1.1.20:
|
||||
resolution: {integrity: sha512-2u84HciDR3E7Uc0t0AEeXHmQAWe9uzRKTz120D3silIJOQlbGIMJMJiGaM8Yx7nEvMyfV0LfSdkEGnb77AN5AA==}
|
||||
|
||||
callsites@3.1.0:
|
||||
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
|
||||
engines: {node: '>=6'}
|
||||
@ -1670,10 +1645,6 @@ snapshots:
|
||||
dependencies:
|
||||
'@types/node': 20.14.11
|
||||
|
||||
'@types/bun@1.1.6':
|
||||
dependencies:
|
||||
bun-types: 1.1.17
|
||||
|
||||
'@types/chrome@0.0.268':
|
||||
dependencies:
|
||||
'@types/filesystem': 0.0.36
|
||||
@ -1698,10 +1669,6 @@ snapshots:
|
||||
|
||||
'@types/json-schema@7.0.15': {}
|
||||
|
||||
'@types/node@20.12.14':
|
||||
dependencies:
|
||||
undici-types: 5.26.5
|
||||
|
||||
'@types/node@20.14.11':
|
||||
dependencies:
|
||||
undici-types: 5.26.5
|
||||
@ -1714,10 +1681,6 @@ snapshots:
|
||||
dependencies:
|
||||
'@types/node': 20.14.11
|
||||
|
||||
'@types/ws@8.5.11':
|
||||
dependencies:
|
||||
'@types/node': 20.12.14
|
||||
|
||||
'@typescript-eslint/eslint-plugin@8.0.0-alpha.45(@typescript-eslint/parser@8.0.0-alpha.45(eslint@9.7.0)(typescript@5.5.4))(eslint@9.7.0)(typescript@5.5.4)':
|
||||
dependencies:
|
||||
'@eslint-community/regexpp': 4.11.0
|
||||
@ -1773,8 +1736,6 @@ snapshots:
|
||||
|
||||
'@typescript-eslint/types@7.17.0': {}
|
||||
|
||||
'@typescript-eslint/types@8.0.0-alpha.36': {}
|
||||
|
||||
'@typescript-eslint/types@8.0.0-alpha.45': {}
|
||||
|
||||
'@typescript-eslint/typescript-estree@7.17.0(typescript@5.5.4)':
|
||||
@ -1794,7 +1755,7 @@ snapshots:
|
||||
|
||||
'@typescript-eslint/typescript-estree@8.0.0-alpha.45(typescript@5.5.4)':
|
||||
dependencies:
|
||||
'@typescript-eslint/types': 8.0.0-alpha.36
|
||||
'@typescript-eslint/types': 8.0.0-alpha.45
|
||||
'@typescript-eslint/visitor-keys': 8.0.0-alpha.45
|
||||
debug: 4.3.5
|
||||
globby: 11.1.0
|
||||
@ -1836,7 +1797,7 @@ snapshots:
|
||||
|
||||
'@typescript-eslint/visitor-keys@8.0.0-alpha.45':
|
||||
dependencies:
|
||||
'@typescript-eslint/types': 8.0.0-alpha.36
|
||||
'@typescript-eslint/types': 8.0.0-alpha.45
|
||||
eslint-visitor-keys: 3.4.3
|
||||
|
||||
acorn-jsx@5.3.2(acorn@8.12.1):
|
||||
@ -1921,16 +1882,6 @@ snapshots:
|
||||
base64-js: 1.5.1
|
||||
ieee754: 1.2.1
|
||||
|
||||
bun-types@1.1.17:
|
||||
dependencies:
|
||||
'@types/node': 20.12.14
|
||||
'@types/ws': 8.5.11
|
||||
|
||||
bun-types@1.1.20:
|
||||
dependencies:
|
||||
'@types/node': 20.12.14
|
||||
'@types/ws': 8.5.11
|
||||
|
||||
callsites@3.1.0: {}
|
||||
|
||||
chalk@4.1.2:
|
||||
|
||||