Refactor: drop all Bun compatibility

SukkaW 2024-07-24 10:59:05 +08:00
parent f30f9774c3
commit bb65a4180c
33 changed files with 112 additions and 1150 deletions

View File

@ -17,7 +17,7 @@ export const getAppleCdnDomainsPromise = createMemoizedPromise(() => fsFetchCach
} }
)); ));
export const buildAppleCdn = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildAppleCdn = task(require.main === module, __filename)(async (span) => {
const res: string[] = await span.traceChildPromise('get apple cdn domains', getAppleCdnDomainsPromise()); const res: string[] = await span.traceChildPromise('get apple cdn domains', getAppleCdnDomainsPromise());
const description = [ const description = [
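
For context, a minimal sketch (illustrative names, not from this repo) of the plain CommonJS entry-point guard that `task(require.main === module, __filename)` now relies on instead of the Bun-aware `Bun.main === __filename` check:

```ts
// entry-point-guard.ts, illustrative only.
// `require.main === module` is truthy only when this file is executed directly
// (`node entry-point-guard.js`), not when another module require()s it; this is
// what task(...) uses to decide whether the task should run immediately.
async function runTask(): Promise<void> {
  console.log('building...');
}

if (require.main === module) {
  runTask().catch((err) => {
    console.error(err);
    process.exitCode = 1;
  });
}

export { runTask };
```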

View File

@ -48,7 +48,7 @@ const getS3OSSDomainsPromise = (async (): Promise<string[]> => {
return Array.from(S3OSSDomains); return Array.from(S3OSSDomains);
})(); })();
export const buildCdnDownloadConf = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildCdnDownloadConf = task(require.main === module, __filename)(async (span) => {
const [ const [
S3OSSDomains, S3OSSDomains,

View File

@ -16,7 +16,7 @@ export const getChnCidrPromise = createMemoizedPromise(async () => {
return exclude(cidr, NON_CN_CIDR_INCLUDED_IN_CHNROUTE, true); return exclude(cidr, NON_CN_CIDR_INCLUDED_IN_CHNROUTE, true);
}); });
export const buildChnCidr = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildChnCidr = task(require.main === module, __filename)(async (span) => {
const filteredCidr = await span.traceChildAsync('download chnroutes2', getChnCidrPromise); const filteredCidr = await span.traceChildAsync('download chnroutes2', getChnCidrPromise);
// Can not use SHARED_DESCRIPTION here as different license // Can not use SHARED_DESCRIPTION here as different license

View File

@ -7,7 +7,7 @@ import { task } from './trace';
const outputSurgeDir = path.resolve(__dirname, '../List'); const outputSurgeDir = path.resolve(__dirname, '../List');
const outputClashDir = path.resolve(__dirname, '../Clash'); const outputClashDir = path.resolve(__dirname, '../Clash');
export const buildCloudMounterRules = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildCloudMounterRules = task(require.main === module, __filename)(async (span) => {
// AND,((SRC-IP,192.168.1.110), (DOMAIN, example.com)) // AND,((SRC-IP,192.168.1.110), (DOMAIN, example.com))
const results = DOMAINS.flatMap(domain => { const results = DOMAINS.flatMap(domain => {

View File

@ -21,7 +21,7 @@ const outputClashDir = path.resolve(__dirname, '../Clash');
const domainsetSrcFolder = 'domainset' + path.sep; const domainsetSrcFolder = 'domainset' + path.sep;
export const buildCommon = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildCommon = task(require.main === module, __filename)(async (span) => {
const promises: Array<Promise<unknown>> = []; const promises: Array<Promise<unknown>> = [];
const paths = await new Fdir() const paths = await new Fdir()

View File

@ -11,7 +11,7 @@ const DEPRECATED_FILES = [
const outputSurgeDir = path.resolve(__dirname, '../List'); const outputSurgeDir = path.resolve(__dirname, '../List');
const outputClashDir = path.resolve(__dirname, '../Clash'); const outputClashDir = path.resolve(__dirname, '../Clash');
export const buildDeprecateFiles = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)((span) => span.traceChildAsync('create deprecated files', async (childSpan) => { export const buildDeprecateFiles = task(require.main === module, __filename)((span) => span.traceChildAsync('create deprecated files', async (childSpan) => {
const promises: Array<Promise<unknown>> = []; const promises: Array<Promise<unknown>> = [];
for (const [filePath, description] of DEPRECATED_FILES) { for (const [filePath, description] of DEPRECATED_FILES) {

View File

@ -29,7 +29,7 @@ export const getDomesticAndDirectDomainsRulesetPromise = createMemoizedPromise(a
return [domestics, directs, lans] as const; return [domestics, directs, lans] as const;
}); });
export const buildDomesticRuleset = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildDomesticRuleset = task(require.main === module, __filename)(async (span) => {
const res = await getDomesticAndDirectDomainsRulesetPromise(); const res = await getDomesticAndDirectDomainsRulesetPromise();
const dataset = Object.entries(DOMESTICS); const dataset = Object.entries(DOMESTICS);

View File

@ -7,7 +7,7 @@ import { NON_CN_CIDR_INCLUDED_IN_CHNROUTE, RESERVED_IPV4_CIDR } from './constant
import { writeFile } from './lib/bun'; import { writeFile } from './lib/bun';
export const buildInternalReverseChnCIDR = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async () => { export const buildInternalReverseChnCIDR = task(require.main === module, __filename)(async () => {
const cidr = await getChnCidrPromise(); const cidr = await getChnCidrPromise();
const reversedCidr = merge( const reversedCidr = merge(

View File

@ -44,7 +44,7 @@ export const getMicrosoftCdnRulesetPromise = createMemoizedPromise(async () => {
.concat(WHITELIST); .concat(WHITELIST);
}); });
export const buildMicrosoftCdn = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildMicrosoftCdn = task(require.main === module, __filename)(async (span) => {
const description = [ const description = [
...SHARED_DESCRIPTION, ...SHARED_DESCRIPTION,
'', '',

View File

@ -5,7 +5,6 @@ import { task } from './trace';
import { treeDir } from './lib/tree-dir'; import { treeDir } from './lib/tree-dir';
import type { TreeType, TreeTypeArray } from './lib/tree-dir'; import type { TreeType, TreeTypeArray } from './lib/tree-dir';
import { fdir as Fdir } from 'fdir'; import { fdir as Fdir } from 'fdir';
import { sort } from './lib/timsort';
import Trie from 'mnemonist/trie'; import Trie from 'mnemonist/trie';
import { writeFile } from './lib/bun'; import { writeFile } from './lib/bun';
@ -23,7 +22,7 @@ const folderAndFilesToBeDeployed = [
'LICENSE' 'LICENSE'
]; ];
export const buildPublic = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildPublic = task(require.main === module, __filename)(async (span) => {
fs.mkdirSync(publicPath, { recursive: true }); fs.mkdirSync(publicPath, { recursive: true });
await span await span
@ -89,7 +88,7 @@ const html = (string: TemplateStringsArray, ...values: any[]) => string.reduce((
const walk = (tree: TreeTypeArray) => { const walk = (tree: TreeTypeArray) => {
let result = ''; let result = '';
sort(tree, prioritySorter); tree.sort(prioritySorter);
for (let i = 0, len = tree.length; i < len; i++) { for (let i = 0, len = tree.length; i < len; i++) {
const entry = tree[i]; const entry = tree[i];
if (entry.type === 'directory') { if (entry.type === 'directory') {

View File

@ -18,11 +18,10 @@ import { SHARED_DESCRIPTION } from './lib/constants';
import { getPhishingDomains } from './lib/get-phishing-domains'; import { getPhishingDomains } from './lib/get-phishing-domains';
import { setAddFromArray, setAddFromArrayCurried } from './lib/set-add-from-array'; import { setAddFromArray, setAddFromArrayCurried } from './lib/set-add-from-array';
import { sort } from './lib/timsort';
const getRejectSukkaConfPromise = readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/reject_sukka.conf')); const getRejectSukkaConfPromise = readFileIntoProcessedArray(path.resolve(__dirname, '../Source/domainset/reject_sukka.conf'));
export const buildRejectDomainSet = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildRejectDomainSet = task(require.main === module, __filename)(async (span) => {
/** Whitelists */ /** Whitelists */
const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST); const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
@ -171,7 +170,7 @@ export const buildRejectDomainSet = task(typeof Bun !== 'undefined' ? Bun.main =
return acc; return acc;
}, new Map()); }, new Map());
return sort(Array.from(statMap.entries()).filter(a => a[1] > 9), (a, b) => (b[1] - a[1]) || a[0].localeCompare(b[0])); return Array.from(statMap.entries()).filter(a => a[1] > 9).sort((a, b) => (b[1] - a[1]) || a[0].localeCompare(b[0]));
}); });
return Promise.all([ return Promise.all([

View File

@ -67,7 +67,7 @@ const getBotNetFilterIPsPromise = fsFetchCache.apply(
const localRejectIPSourcesPromise = readFileIntoProcessedArray(path.resolve(__dirname, '../Source/ip/reject.conf')); const localRejectIPSourcesPromise = readFileIntoProcessedArray(path.resolve(__dirname, '../Source/ip/reject.conf'));
export const buildRejectIPList = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildRejectIPList = task(require.main === module, __filename)(async (span) => {
const result = await localRejectIPSourcesPromise; const result = await localRejectIPSourcesPromise;
const bogusNxDomainIPs = await span.traceChildPromise('get bogus nxdomain ips', getBogusNxDomainIPsPromise); const bogusNxDomainIPs = await span.traceChildPromise('get bogus nxdomain ips', getBogusNxDomainIPsPromise);

View File

@ -43,7 +43,7 @@ const HOSTNAMES = [
'*.battlenet.com' '*.battlenet.com'
]; ];
export const buildAlwaysRealIPModule = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildAlwaysRealIPModule = task(require.main === module, __filename)(async (span) => {
// Intranet, Router Setup, and mant more // Intranet, Router Setup, and mant more
const dataset = [Object.entries(DIRECTS), Object.entries(LANS)]; const dataset = [Object.entries(DIRECTS), Object.entries(LANS)];
const surge = dataset.flatMap(data => data.flatMap(([, { domains }]) => domains.flatMap((domain) => [`*.${domain}`, domain]))); const surge = dataset.flatMap(data => data.flatMap(([, { domains }]) => domains.flatMap((domain) => [`*.${domain}`, domain])));

View File

@ -120,7 +120,7 @@ const REDIRECT_FAKEWEBSITES = [
['zbrushcn.com', 'https://www.maxon.net/en/zbrush'] ['zbrushcn.com', 'https://www.maxon.net/en/zbrush']
] as const; ] as const;
export const buildRedirectModule = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildRedirectModule = task(require.main === module, __filename)(async (span) => {
const domains = Array.from(new Set([ const domains = Array.from(new Set([
...REDIRECT_MIRROR.map(([from]) => getHostname(from, { detectIp: false })), ...REDIRECT_MIRROR.map(([from]) => getHostname(from, { detectIp: false })),
...REDIRECT_FAKEWEBSITES.flatMap(([from]) => [from, `www.${from}`]) ...REDIRECT_FAKEWEBSITES.flatMap(([from]) => [from, `www.${from}`])

View File

@ -13,7 +13,7 @@ import { readFileIntoProcessedArray } from './lib/fetch-text-by-line';
import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem'; import { TTL, deserializeArray, fsFetchCache, serializeArray } from './lib/cache-filesystem';
import { createTrie } from './lib/trie'; import { createTrie } from './lib/trie';
import { peek } from './lib/bun'; import { peek, track } from './lib/bun';
const s = new Sema(2); const s = new Sema(2);
@ -82,7 +82,7 @@ const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>>
} }
}; };
export const buildSpeedtestDomainSet = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildSpeedtestDomainSet = task(require.main === module, __filename)(async (span) => {
const domainTrie = createTrie( const domainTrie = createTrie(
[ [
// speedtest.net // speedtest.net
@ -226,13 +226,13 @@ export const buildSpeedtestDomainSet = task(typeof Bun !== 'undefined' ? Bun.mai
'Brazil', 'Brazil',
'Turkey' 'Turkey'
]).reduce<Record<string, Promise<void>>>((pMap, keyword) => { ]).reduce<Record<string, Promise<void>>>((pMap, keyword) => {
pMap[keyword] = span.traceChildAsync(`fetch speedtest endpoints: ${keyword}`, () => querySpeedtestApi(keyword)).then(hostnameGroup => { pMap[keyword] = track(span.traceChildAsync(`fetch speedtest endpoints: ${keyword}`, () => querySpeedtestApi(keyword)).then(hostnameGroup => {
return hostnameGroup.forEach(hostname => { return hostnameGroup.forEach(hostname => {
if (hostname) { if (hostname) {
domainTrie.add(hostname); domainTrie.add(hostname);
} }
}); });
}); }));
return pMap; return pMap;
}, {}); }, {});
@ -240,7 +240,7 @@ export const buildSpeedtestDomainSet = task(typeof Bun !== 'undefined' ? Bun.mai
const timer = setTimeout(() => { const timer = setTimeout(() => {
console.error(picocolors.red('Task timeout!')); console.error(picocolors.red('Task timeout!'));
Object.entries(pMap).forEach(([name, p]) => { Object.entries(pMap).forEach(([name, p]) => {
console.log(`[${name}]`, peek.status(p)); console.log(`[${name}]`, peek(p));
}); });
resolve(); resolve();

View File

@ -28,7 +28,7 @@ const removeNoResolved = (line: string) => line.replace(',no-resolve', '');
/** /**
* This only generates a simplified version, for under-used users only. * This only generates a simplified version, for under-used users only.
*/ */
export const buildSSPanelUIMAppProfile = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildSSPanelUIMAppProfile = task(require.main === module, __filename)(async (span) => {
const [ const [
[domesticDomains, directDomains, lanDomains], [domesticDomains, directDomains, lanDomains],
appleCdnDomains, appleCdnDomains,

View File

@ -50,7 +50,7 @@ export const createRulesetForStreamService = (span: Span, fileId: string, title:
])); ]));
}; };
export const buildStreamService = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildStreamService = task(require.main === module, __filename)(async (span) => {
return Promise.all([ return Promise.all([
createRulesetForStreamService(span, 'stream', 'All', ALL), createRulesetForStreamService(span, 'stream', 'All', ALL),
createRulesetForStreamService(span, 'stream_us', 'North America', NORTH_AMERICA), createRulesetForStreamService(span, 'stream_us', 'North America', NORTH_AMERICA),

View File

@ -32,7 +32,7 @@ export const getTelegramCIDRPromise = createMemoizedPromise(async () => {
return { date, results }; return { date, results };
}); });
export const buildTelegramCIDR = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const buildTelegramCIDR = task(require.main === module, __filename)(async (span) => {
const { date, results } = await span.traceChildAsync('get telegram cidr', getTelegramCIDRPromise); const { date, results } = await span.traceChildAsync('get telegram cidr', getTelegramCIDRPromise);
if (results.length === 0) { if (results.length === 0) {

View File

@ -15,7 +15,7 @@ const ASSETS_LIST = {
const mockDir = path.resolve(__dirname, '../Mock'); const mockDir = path.resolve(__dirname, '../Mock');
export const downloadMockAssets = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)((span) => Promise.all(Object.entries(ASSETS_LIST).map( export const downloadMockAssets = task(require.main === module, __filename)((span) => Promise.all(Object.entries(ASSETS_LIST).map(
([filename, url]) => span ([filename, url]) => span
.traceChild(url) .traceChild(url)
.traceAsyncFn(() => fetchWithRetry(url).then(res => { .traceAsyncFn(() => fetchWithRetry(url).then(res => {

View File

@ -13,7 +13,7 @@ import { Readable } from 'stream';
const IS_READING_BUILD_OUTPUT = 1 << 2; const IS_READING_BUILD_OUTPUT = 1 << 2;
const ALL_FILES_EXISTS = 1 << 3; const ALL_FILES_EXISTS = 1 << 3;
export const downloadPreviousBuild = task(typeof Bun !== 'undefined' ? Bun.main === __filename : require.main === module, __filename)(async (span) => { export const downloadPreviousBuild = task(require.main === module, __filename)(async (span) => {
const buildOutputList: string[] = []; const buildOutputList: string[] = [];
let flag = 1 | ALL_FILES_EXISTS; let flag = 1 | ALL_FILES_EXISTS;

View File

@ -1,4 +1,4 @@
console.log('Version:', process.version, typeof Bun !== 'undefined' ? Bun.revision : ''); console.log('Version:', process.version);
import { downloadPreviousBuild } from './download-previous-build'; import { downloadPreviousBuild } from './download-previous-build';
import { buildCommon } from './build-common'; import { buildCommon } from './build-common';

View File

@ -2,19 +2,29 @@ import { dirname } from 'path';
import fs from 'fs'; import fs from 'fs';
import fsp from 'fs/promises'; import fsp from 'fs/promises';
-interface Peek {
-  <T = undefined>(promise: T | Promise<T>): Promise<T> | T,
-  status<T = undefined>(
-    promise: T | Promise<T>,
-  ): 'pending' | 'fulfilled' | 'rejected' | 'unknown'
-}
-const noopPeek = <T = undefined>(_: Promise<T>) => _;
-noopPeek.status = () => 'unknown';
-export const peek: Peek = typeof Bun !== 'undefined'
-  ? Bun.peek
-  : noopPeek as Peek;
+const peekStatus = new WeakMap<Promise<any>, 'pending' | 'rejected' | 'fulfilled'>();
+export function track<T>(promise: Promise<T>): Promise<T> {
+  // only set to pending if not already tracked
+  if (!peekStatus.has(promise)) {
+    peekStatus.set(promise, 'pending');
+  }
+  // Observe the promise, saving the fulfillment in a closure scope.
+  return promise.then(
+    (v) => {
+      peekStatus.set(promise, 'fulfilled');
+      return v;
+    },
+    (e) => {
+      peekStatus.set(promise, 'rejected');
+      throw e;
+    }
+  );
+}
+export function peek(promise: Promise<any>): 'pending' | 'rejected' | 'fulfilled' | 'unknown' {
+  return peekStatus.get(promise) ?? 'unknown';
+}
 interface Write {
   (
@ -23,13 +33,11 @@ interface Write {
   ): Promise<unknown>
 }
-export const writeFile: Write = typeof Bun !== 'undefined'
-  ? Bun.write
-  : (async (destination: string, input) => {
-    const dir = dirname(destination);
+export const writeFile: Write = async (destination: string, input) => {
+  const dir = dirname(destination);
   if (!fs.existsSync(dir)) {
     await fsp.mkdir(dir, { recursive: true });
   }
   return fsp.writeFile(destination, input, { encoding: 'utf-8' });
-  });
+};
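
A hedged usage sketch of the new `track`/`peek` helpers (not part of the commit): `peek` only reports a status for promises that were previously passed to `track`, which is why the speedtest task above wraps each request with `track(...)` before stashing it for the timeout diagnostics:

```ts
import { peek, track } from './lib/bun'; // path as used by the Build scripts in this commit

async function demo() {
  const job = new Promise<string>((resolve) => { setTimeout(() => resolve('done'), 50); });

  track(job);                            // registers `job` as 'pending' and observes its settlement
  console.log(peek(job));                // 'pending'
  console.log(peek(Promise.resolve(1))); // 'unknown': peek only knows promises that went through track()

  await job;                             // track() attached its observer first, so the status is already recorded
  console.log(peek(job));                // 'fulfilled'
}

demo().catch(console.error);
```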

View File

@ -5,7 +5,6 @@ import path from 'path';
import { mkdirSync } from 'fs'; import { mkdirSync } from 'fs';
import picocolors from 'picocolors'; import picocolors from 'picocolors';
import { fastStringArrayJoin } from './misc'; import { fastStringArrayJoin } from './misc';
import { peek } from './bun';
import { performance } from 'perf_hooks'; import { performance } from 'perf_hooks';
const identity = (x: any) => x; const identity = (x: any) => x;
@ -178,32 +177,23 @@ export class Cache<S = string> {
} }
const cached = this.get(key); const cached = this.get(key);
-  let value: T;
   if (cached == null) {
     console.log(picocolors.yellow('[cache] miss'), picocolors.gray(key), picocolors.gray(`ttl: ${TTL.humanReadable(ttl)}`));
     const serializer = 'serializer' in opt ? opt.serializer : identity;
     const promise = fn();
-    const peeked = peek(promise);
-    if (peeked === promise) {
-      return promise.then((value) => {
-        this.set(key, serializer(value), ttl);
-        return value;
-      });
-    }
-    value = peeked as T;
-    this.set(key, serializer(value), ttl);
-  } else {
-    console.log(picocolors.green('[cache] hit'), picocolors.gray(key));
-    const deserializer = 'deserializer' in opt ? opt.deserializer : identity;
-    value = deserializer(cached);
+    return promise.then((value) => {
+      this.set(key, serializer(value), ttl);
+      return value;
+    });
   }
-  return value;
+  console.log(picocolors.green('[cache] hit'), picocolors.gray(key));
+  const deserializer = 'deserializer' in opt ? opt.deserializer : identity;
+  return deserializer(cached);
 }
destroy() { destroy() {
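
For context, a minimal standalone sketch (hypothetical names) of the miss/hit flow the rewritten cache method converges on: on a miss the producer runs and its serialized result is persisted, on a hit the stored value is deserialized, and a Promise is returned either way:

```ts
// Minimal sketch of a TTL-backed async memoizer; all names are illustrative.
interface TtlStore {
  get(key: string): string | null;
  set(key: string, value: string, ttl: number): void;
}

function applyCached<T>(
  store: TtlStore,
  key: string,
  ttl: number,
  fn: () => Promise<T>,
  serialize: (v: T) => string,
  deserialize: (s: string) => T
): Promise<T> {
  const cached = store.get(key);
  if (cached == null) {
    // miss: produce, persist, pass the value through
    return fn().then((value) => {
      store.set(key, serialize(value), ttl);
      return value;
    });
  }
  // hit: no async work needed, but keep the Promise-returning contract
  return Promise.resolve(deserialize(cached));
}
```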

View File

@ -4,7 +4,6 @@ import picocolors from 'picocolors';
import type { Span } from '../trace'; import type { Span } from '../trace';
import path from 'path'; import path from 'path';
import fs from 'fs'; import fs from 'fs';
import { sort } from './timsort';
import { fastStringArrayJoin } from './misc'; import { fastStringArrayJoin } from './misc';
import { readFileByLine } from './fetch-text-by-line'; import { readFileByLine } from './fetch-text-by-line';
import { writeFile } from './bun'; import { writeFile } from './bun';
@ -121,33 +120,31 @@ const sortTypeOrder: Record<string | typeof defaultSortTypeOrder, number> = {
}; };
// sort DOMAIN-SUFFIX and DOMAIN first, then DOMAIN-KEYWORD, then IP-CIDR and IP-CIDR6 if any // sort DOMAIN-SUFFIX and DOMAIN first, then DOMAIN-KEYWORD, then IP-CIDR and IP-CIDR6 if any
export const sortRuleSet = (ruleSet: string[]) => { export const sortRuleSet = (ruleSet: string[]) => {
-  return sort(
-    ruleSet.map((rule) => {
-      const type = collectType(rule);
-      if (!type) {
-        return [10, rule] as const;
-      }
-      if (!(type in sortTypeOrder)) {
-        return [sortTypeOrder[defaultSortTypeOrder], rule] as const;
-      }
-      if (type === 'URL-REGEX') {
-        let extraWeight = 0;
-        if (rule.includes('.+') || rule.includes('.*')) {
-          extraWeight += 10;
-        }
-        if (rule.includes('|')) {
-          extraWeight += 1;
-        }
-        return [
-          sortTypeOrder[type] + extraWeight,
-          rule
-        ] as const;
-      }
-      return [sortTypeOrder[type], rule] as const;
-    }),
-    (a, b) => a[0] - b[0]
-  ).map(c => c[1]);
+  return ruleSet.map((rule) => {
+    const type = collectType(rule);
+    if (!type) {
+      return [10, rule] as const;
+    }
+    if (!(type in sortTypeOrder)) {
+      return [sortTypeOrder[defaultSortTypeOrder], rule] as const;
+    }
+    if (type === 'URL-REGEX') {
+      let extraWeight = 0;
+      if (rule.includes('.+') || rule.includes('.*')) {
+        extraWeight += 10;
+      }
+      if (rule.includes('|')) {
+        extraWeight += 1;
+      }
+      return [
+        sortTypeOrder[type] + extraWeight,
+        rule
+      ] as const;
+    }
+    return [sortTypeOrder[type], rule] as const;
+  }).sort((a, b) => a[0] - b[0])
+    .map(c => c[1]);
}; };
const MARK = 'this_ruleset_is_made_by_sukkaw.ruleset.skk.moe'; const MARK = 'this_ruleset_is_made_by_sukkaw.ruleset.skk.moe';
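
For context, a minimal sketch (the weights below are illustrative, the real table is `sortTypeOrder`) of the decorate/sort/undecorate pattern that the rewritten `sortRuleSet` now expresses with the built-in `Array.prototype.sort`, which ES2019 requires to be stable:

```ts
// Assign an illustrative weight per rule type; unknown types sort last.
const weightOf = (rule: string): number => {
  if (rule.startsWith('DOMAIN,') || rule.startsWith('DOMAIN-SUFFIX,')) return 1;
  if (rule.startsWith('DOMAIN-KEYWORD,')) return 2;
  if (rule.startsWith('IP-CIDR,') || rule.startsWith('IP-CIDR6,')) return 3;
  return 10;
};

const sortRules = (rules: string[]): string[] =>
  rules
    .map((rule) => [weightOf(rule), rule] as const) // decorate
    .sort((a, b) => a[0] - b[0])                    // numeric sort on the weight, stable for ties
    .map(([, rule]) => rule);                       // undecorate

console.log(sortRules(['IP-CIDR,10.0.0.0/8', 'DOMAIN,example.com', 'DOMAIN-KEYWORD,cdn']));
```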

View File

@ -1,12 +1,11 @@
import fs from 'fs'; import fs from 'fs';
import { Readable } from 'stream'; import { Readable } from 'stream';
import type { BunFile } from 'bun';
import { fetchWithRetry, defaultRequestInit } from './fetch-retry'; import { fetchWithRetry, defaultRequestInit } from './fetch-retry';
import type { FileHandle } from 'fs/promises'; import type { FileHandle } from 'fs/promises';
import { TextLineStream } from './text-line-transform-stream'; import { TextLineStream } from './text-line-transform-stream';
import { PolyfillTextDecoderStream } from './text-decoder-stream'; import type { ReadableStream } from 'stream/web';
import { TextDecoderStream as NodeTextDecoderStream } from 'stream/web'; import { TextDecoderStream } from 'stream/web';
import { processLine } from './process-line'; import { processLine } from './process-line';
const enableTextLineStream = !!process.env.ENABLE_TEXT_LINE_STREAM; const enableTextLineStream = !!process.env.ENABLE_TEXT_LINE_STREAM;
@ -39,33 +38,17 @@ async function *createTextLineAsyncIterableFromStreamSource(stream: ReadableStre
} }
} }
-const getReadableStream = typeof Bun !== 'undefined'
-  ? (file: string | BunFile | FileHandle): ReadableStream => {
-    if (typeof file === 'string') {
-      return Bun.file(file).stream();
-    }
-    if ('writer' in file) {
-      return file.stream();
-    }
-    return file.readableWebStream();
-  }
-  : (file: string | BunFile | FileHandle): ReadableStream => {
-    if (typeof file === 'string') {
-      return Readable.toWeb(fs.createReadStream(file /* { encoding: 'utf-8' } */));
-    }
-    if ('writer' in file) {
-      return file.stream();
-    }
-    return file.readableWebStream();
-  };
-// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition -- On Bun, NodeTextDecoderStream is undefined
-const TextDecoderStream = NodeTextDecoderStream ?? PolyfillTextDecoderStream;
+const getReadableStream = (file: string | FileHandle): ReadableStream => {
+  if (typeof file === 'string') {
+    return Readable.toWeb(fs.createReadStream(file /* { encoding: 'utf-8' } */));
+  }
+  return file.readableWebStream();
+};
 // TODO: use FileHandle.readLine()
-export const readFileByLine: ((file: string | BunFile | FileHandle) => AsyncIterable<string>) = enableTextLineStream
-  ? (file: string | BunFile | FileHandle) => getReadableStream(file).pipeThrough(new TextDecoderStream()).pipeThrough(new TextLineStream())
-  : (file: string | BunFile | FileHandle) => createTextLineAsyncIterableFromStreamSource(getReadableStream(file));
+export const readFileByLine: ((file: string | FileHandle) => AsyncIterable<string>) = enableTextLineStream
+  ? (file: string | FileHandle) => getReadableStream(file).pipeThrough(new TextDecoderStream()).pipeThrough(new TextLineStream())
+  : (file: string | FileHandle) => createTextLineAsyncIterableFromStreamSource(getReadableStream(file));
const ensureResponseBody = (resp: Response) => { const ensureResponseBody = (resp: Response) => {
if (!resp.body) { if (!resp.body) {
@ -85,7 +68,7 @@ export function fetchRemoteTextByLine(url: string | URL) {
return fetchWithRetry(url, defaultRequestInit).then(createReadlineInterfaceFromResponse); return fetchWithRetry(url, defaultRequestInit).then(createReadlineInterfaceFromResponse);
} }
export async function readFileIntoProcessedArray(file: string | BunFile | FileHandle) { export async function readFileIntoProcessedArray(file: string | FileHandle) {
const results = []; const results = [];
for await (const line of readFileByLine(file)) { for await (const line of readFileByLine(file)) {
if (processLine(line)) { if (processLine(line)) {
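
For context, a minimal Node-only sketch (Node 18+ assumed; the helper and the line splitting are simplified stand-ins for the repo's `TextLineStream`) of the pipeline `readFileByLine` now builds: a `fs` read stream converted with `Readable.toWeb`, decoded by the `TextDecoderStream` from `stream/web`, and consumed line by line:

```ts
import fs from 'fs';
import { Readable } from 'stream';
import { TextDecoderStream } from 'stream/web';

async function* linesOf(path: string): AsyncIterable<string> {
  // bytes -> web ReadableStream -> decoded strings
  const decoded = Readable.toWeb(fs.createReadStream(path))
    .pipeThrough(new TextDecoderStream());

  let buffer = '';
  for await (const chunk of decoded) {
    buffer += chunk;
    let index: number;
    while ((index = buffer.indexOf('\n')) !== -1) {
      yield buffer.slice(0, index).replace(/\r$/, '');
      buffer = buffer.slice(index + 1);
    }
  }
  if (buffer) yield buffer; // trailing line without a newline
}

// usage: for await (const line of linesOf('some-file.conf')) { /* ... */ }
```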

View File

@ -4,13 +4,11 @@ import { createMemoizedPromise } from './memo-promise';
import { getPublicSuffixListTextPromise } from './download-publicsuffixlist'; import { getPublicSuffixListTextPromise } from './download-publicsuffixlist';
import { fileURLToPath } from 'url'; import { fileURLToPath } from 'url';
-const customFetch = typeof Bun !== 'undefined'
-  ? (url: string | URL) => Promise.resolve(Bun.file(url))
-  : async (url: string | URL) => {
-    const filePath = fileURLToPath(url);
-    const file = await fsp.readFile(filePath);
-    return new Blob([file]) as any;
-  };
+const customFetch = async (url: string | URL) => {
+  const filePath = fileURLToPath(url);
+  const file = await fsp.readFile(filePath);
+  return new Blob([file]) as any;
+};
export const getGorhillPublicSuffixPromise = createMemoizedPromise(async () => { export const getGorhillPublicSuffixPromise = createMemoizedPromise(async () => {
const [publicSuffixListDat, { default: gorhill }] = await Promise.all([ const [publicSuffixListDat, { default: gorhill }] = await Promise.all([
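
For context, a hedged sketch (hypothetical file name) of what the Node-only `customFetch` above does: read a `file://` URL into a `Blob`, keeping a fetch-like return value:

```ts
import fsp from 'fs/promises';
import { Blob } from 'buffer';
import { fileURLToPath, pathToFileURL } from 'url';

// Hypothetical stand-in for customFetch: file:// URL in, Blob out.
const fetchLocal = async (url: string | URL): Promise<Blob> => {
  const buf = await fsp.readFile(fileURLToPath(url));
  return new Blob([buf]);
};

// usage (file name is illustrative)
fetchLocal(pathToFileURL('./public_suffix_list.dat'))
  .then((blob) => blob.text())
  .then((text) => console.log(text.length))
  .catch(console.error);
```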

View File

@ -2,7 +2,6 @@
// (since it is hashes based). But the result is still deterministic, which is // (since it is hashes based). But the result is still deterministic, which is
// enough when sorting. // enough when sorting.
import * as tldts from 'tldts-experimental'; import * as tldts from 'tldts-experimental';
import { sort } from './timsort';
import { looseTldtsOpt } from '../constants/loose-tldts-opt'; import { looseTldtsOpt } from '../constants/loose-tldts-opt';
export const compare = (a: string, b: string) => { export const compare = (a: string, b: string) => {
@ -58,5 +57,5 @@ export const sortDomains = (
return t; return t;
}; };
return sort(inputs, sorter); return inputs.sort(sorter);
}; };

View File

@ -2,6 +2,8 @@
// This module is browser compatible. // This module is browser compatible.
// Modified by Sukka (https://skk.moe) to increase compatibility and performance with Bun. // Modified by Sukka (https://skk.moe) to increase compatibility and performance with Bun.
import { TransformStream } from 'stream/web';
interface TextLineStreamOptions { interface TextLineStreamOptions {
/** Allow splitting by solo \r */ /** Allow splitting by solo \r */
allowCR?: boolean allowCR?: boolean

View File

@ -1,956 +0,0 @@
type Comparator<T> = (a: T, b: T) => number;
/**
* Default minimum size of a run.
*/
const DEFAULT_MIN_MERGE = 32;
/**
* Minimum ordered subsequece required to do galloping.
*/
const DEFAULT_MIN_GALLOPING = 7;
/**
* Default tmp storage length. Can increase depending on the size of the
* smallest run to merge.
*/
const DEFAULT_TMP_STORAGE_LENGTH = 256;
/**
* Pre-computed powers of 10 for efficient lexicographic comparison of
* small integers.
*/
const POWERS_OF_TEN = [1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9];
/**
* Estimate the logarithm base 10 of a small integer.
*
* @param x - The integer to estimate the logarithm of.
* @return {number} - The estimated logarithm of the integer.
*/
function log10(x: number): number {
if (x < 1e5) {
if (x < 1e2) {
return x < 1e1 ? 0 : 1;
}
if (x < 1e4) {
return x < 1e3 ? 2 : 3;
}
return 4;
}
if (x < 1e7) {
return x < 1e6 ? 5 : 6;
}
if (x < 1e9) {
return x < 1e8 ? 7 : 8;
}
return 9;
}
/**
* Default alphabetical comparison of items.
*
* @param a - First element to compare.
* @param b - Second element to compare.
* @return - A positive number if a.toString() > b.toString(), a
* negative number if .toString() < b.toString(), 0 otherwise.
*/
function alphabeticalCompare(a: any, b: any): number {
if (a === b) {
return 0;
}
if (~~a === a && ~~b === b) {
if (a === 0 || b === 0) {
return a < b ? -1 : 1;
}
if (a < 0 || b < 0) {
if (b >= 0) {
return -1;
}
if (a >= 0) {
return 1;
}
a = -a;
b = -b;
}
const al = log10(a);
const bl = log10(b);
let t = 0;
if (al < bl) {
a *= POWERS_OF_TEN[bl - al - 1];
b /= 10;
t = -1;
} else if (al > bl) {
b *= POWERS_OF_TEN[al - bl - 1];
a /= 10;
t = 1;
}
if (a === b) {
return t;
}
return a < b ? -1 : 1;
}
const aStr = String(a);
const bStr = String(b);
if (aStr === bStr) {
return 0;
}
return aStr < bStr ? -1 : 1;
}
/**
* Compute minimum run length for TimSort
*
* @param n - The size of the array to sort.
*/
function minRunLength(n: number) {
let r = 0;
while (n >= DEFAULT_MIN_MERGE) {
r |= (n & 1);
n >>= 1;
}
return n + r;
}
/**
* Counts the length of a monotonically ascending or strictly monotonically
* descending sequence (run) starting at array[lo] in the range [lo, hi). If
* the run is descending it is made ascending.
*
* @param array - The array to reverse.
* @param lo - First element in the range (inclusive).
* @param hi - Last element in the range.
* @param compare - Item comparison function.
* @return - The length of the run.
*/
function makeAscendingRun<T>(array: T[], lo: number, hi: number, compare: Comparator<T>): number {
let runHi = lo + 1;
if (runHi === hi) {
return 1;
}
// Descending
if (compare(array[runHi++], array[lo]) < 0) {
while (runHi < hi && compare(array[runHi], array[runHi - 1]) < 0) {
runHi++;
}
reverseRun(array, lo, runHi);
// Ascending
} else {
while (runHi < hi && compare(array[runHi], array[runHi - 1]) >= 0) {
runHi++;
}
}
return runHi - lo;
}
/**
* Reverse an array in the range [lo, hi).
*
* @param array - The array to reverse.
* @param lo - First element in the range (inclusive).
* @param hi - Last element in the range.
*/
function reverseRun<T>(array: T[], lo: number, hi: number) {
hi--;
while (lo < hi) {
const t = array[lo];
array[lo++] = array[hi];
array[hi--] = t;
}
}
/**
* Perform the binary sort of the array in the range [lo, hi) where start is
* the first element possibly out of order.
*
* @param array - The array to sort.
* @param lo - First element in the range (inclusive).
* @param hi - Last element in the range.
* @param start - First element possibly out of order.
* @param compare - Item comparison function.
*/
function binaryInsertionSort<T>(array: T[], lo: number, hi: number, start: number, compare: Comparator<T>) {
if (start === lo) {
start++;
}
for (; start < hi; start++) {
const pivot = array[start];
// Ranges of the array where pivot belongs
let left = lo;
let right = start;
/*
* pivot >= array[i] for i in [lo, left)
* pivot < array[i] for i in in [right, start)
*/
while (left < right) {
const mid = (left + right) >>> 1;
if (compare(pivot, array[mid]) < 0) {
right = mid;
} else {
left = mid + 1;
}
}
/*
* Move elements right to make room for the pivot. If there are elements
* equal to pivot, left points to the first slot after them: this is also
* a reason for which TimSort is stable
*/
let n = start - left;
// Switch is just an optimization for small arrays
switch (n) {
case 3:
array[left + 3] = array[left + 2];
/* falls through */
case 2:
array[left + 2] = array[left + 1];
/* falls through */
case 1:
array[left + 1] = array[left];
break;
default:
while (n > 0) {
array[left + n] = array[left + n - 1];
n--;
}
}
array[left] = pivot;
}
}
/**
* Find the position at which to insert a value in a sorted range. If the range
* contains elements equal to the value the leftmost element index is returned
* (for stability).
*
* @param value - Value to insert.
* @param array - The array in which to insert value.
* @param start - First element in the range.
* @param length - Length of the range.
* @param hint - The index at which to begin the search.
* @param compare - Item comparison function.
* @return - The index where to insert value.
*/
function gallopLeft<T>(value: T, array: T[], start: number, length: number, hint: number, compare: Comparator<T>): number {
let lastOffset = 0;
let maxOffset = 0;
let offset = 1;
if (compare(value, array[start + hint]) > 0) {
maxOffset = length - hint;
while (offset < maxOffset && compare(value, array[start + hint + offset]) > 0) {
lastOffset = offset;
offset = (offset << 1) + 1;
if (offset <= 0) {
offset = maxOffset;
}
}
if (offset > maxOffset) {
offset = maxOffset;
}
// Make offsets relative to start
lastOffset += hint;
offset += hint;
// value <= array[start + hint]
} else {
maxOffset = hint + 1;
while (offset < maxOffset && compare(value, array[start + hint - offset]) <= 0) {
lastOffset = offset;
offset = (offset << 1) + 1;
if (offset <= 0) {
offset = maxOffset;
}
}
if (offset > maxOffset) {
offset = maxOffset;
}
// Make offsets relative to start
const tmp = lastOffset;
lastOffset = hint - offset;
offset = hint - tmp;
}
/*
* Now array[start+lastOffset] < value <= array[start+offset], so value
* belongs somewhere in the range (start + lastOffset, start + offset]. Do a
* binary search, with invariant array[start + lastOffset - 1] < value <=
* array[start + offset].
*/
lastOffset++;
while (lastOffset < offset) {
const m = lastOffset + ((offset - lastOffset) >>> 1);
if (compare(value, array[start + m]) > 0) {
lastOffset = m + 1;
} else {
offset = m;
}
}
return offset;
}
/**
* Find the position at which to insert a value in a sorted range. If the range
* contains elements equal to the value the rightmost element index is returned
* (for stability).
*
* @param value - Value to insert.
* @param array - The array in which to insert value.
* @param start - First element in the range.
* @param length - Length of the range.
* @param hint - The index at which to begin the search.
* @param compare - Item comparison function.
* @return - The index where to insert value.
*/
function gallopRight<T>(value: T, array: T[], start: number, length: number, hint: number, compare: Comparator<T>): number {
let lastOffset = 0;
let maxOffset = 0;
let offset = 1;
if (compare(value, array[start + hint]) < 0) {
maxOffset = hint + 1;
while (offset < maxOffset && compare(value, array[start + hint - offset]) < 0) {
lastOffset = offset;
offset = (offset << 1) + 1;
if (offset <= 0) {
offset = maxOffset;
}
}
if (offset > maxOffset) {
offset = maxOffset;
}
// Make offsets relative to start
const tmp = lastOffset;
lastOffset = hint - offset;
offset = hint - tmp;
// value >= array[start + hint]
} else {
maxOffset = length - hint;
while (offset < maxOffset && compare(value, array[start + hint + offset]) >= 0) {
lastOffset = offset;
offset = (offset << 1) + 1;
if (offset <= 0) {
offset = maxOffset;
}
}
if (offset > maxOffset) {
offset = maxOffset;
}
// Make offsets relative to start
lastOffset += hint;
offset += hint;
}
/*
* Now array[start+lastOffset] < value <= array[start+offset], so value
* belongs somewhere in the range (start + lastOffset, start + offset]. Do a
* binary search, with invariant array[start + lastOffset - 1] < value <=
* array[start + offset].
*/
lastOffset++;
while (lastOffset < offset) {
const m = lastOffset + ((offset - lastOffset) >>> 1);
if (compare(value, array[start + m]) < 0) {
offset = m;
} else {
lastOffset = m + 1;
}
}
return offset;
}
class TimSort<T> {
tmp: T[];
minGallop = DEFAULT_MIN_GALLOPING;
length = 0;
tmpStorageLength = DEFAULT_TMP_STORAGE_LENGTH;
stackLength = 0;
runStart: number[];
runLength: number[];
stackSize = 0;
constructor(public array: T[], public compare: Comparator<T>) {
this.length = array.length;
if (this.length < 2 * DEFAULT_TMP_STORAGE_LENGTH) {
this.tmpStorageLength = this.length >>> 1;
}
this.tmp = new Array(this.tmpStorageLength);
if (this.length < 120) {
this.stackLength = 5;
} else if (this.length < 1542) {
this.stackLength = 10;
} else if (this.length < 119151) {
this.stackLength = 19;
} else {
this.stackLength = 40;
}
this.runStart = new Array(this.stackLength);
this.runLength = new Array(this.stackLength);
}
/**
* Push a new run on TimSort's stack.
*
* @param runStart - Start index of the run in the original array.
* @param runLength - Length of the run;
*/
pushRun(runStart: number, runLength: number) {
this.runStart[this.stackSize] = runStart;
this.runLength[this.stackSize] = runLength;
this.stackSize += 1;
}
/**
* Merge runs on TimSort's stack so that the following holds for all i:
* 1) runLength[i - 3] > runLength[i - 2] + runLength[i - 1]
* 2) runLength[i - 2] > runLength[i - 1]
*/
mergeRuns() {
while (this.stackSize > 1) {
let n = this.stackSize - 2;
if ((n >= 1
&& this.runLength[n - 1] <= this.runLength[n] + this.runLength[n + 1])
|| (n >= 2
&& this.runLength[n - 2] <= this.runLength[n] + this.runLength[n - 1])) {
if (this.runLength[n - 1] < this.runLength[n + 1]) {
n--;
}
} else if (this.runLength[n] > this.runLength[n + 1]) {
break;
}
this.mergeAt(n);
}
}
/**
* Merge all runs on TimSort's stack until only one remains.
*/
forceMergeRuns() {
while (this.stackSize > 1) {
let n = this.stackSize - 2;
if (n > 0 && this.runLength[n - 1] < this.runLength[n + 1]) {
n--;
}
this.mergeAt(n);
}
}
/**
* Merge the runs on the stack at positions i and i+1. Must be always be called
* with i=stackSize-2 or i=stackSize-3 (that is, we merge on top of the stack).
*
* @param i - Index of the run to merge in TimSort's stack.
*/
mergeAt(i: number) {
const compare = this.compare;
const array = this.array;
let start1 = this.runStart[i];
let length1 = this.runLength[i];
const start2 = this.runStart[i + 1];
let length2 = this.runLength[i + 1];
this.runLength[i] = length1 + length2;
if (i === this.stackSize - 3) {
this.runStart[i + 1] = this.runStart[i + 2];
this.runLength[i + 1] = this.runLength[i + 2];
}
this.stackSize--;
/*
* Find where the first element in the second run goes in run1. Previous
* elements in run1 are already in place
*/
const k = gallopRight(array[start2], array, start1, length1, 0, compare);
start1 += k;
length1 -= k;
if (length1 === 0) {
return;
}
/*
* Find where the last element in the first run goes in run2. Next elements
* in run2 are already in place
*/
length2 = gallopLeft(array[start1 + length1 - 1], array, start2, length2, length2 - 1, compare);
if (length2 === 0) {
return;
}
/*
* Merge remaining runs. A tmp array with length = min(length1, length2) is
* used
*/
if (length1 <= length2) {
this.mergeLow(start1, length1, start2, length2);
} else {
this.mergeHigh(start1, length1, start2, length2);
}
}
/**
* Merge two adjacent runs in a stable way. The runs must be such that the
* first element of run1 is bigger than the first element in run2 and the
* last element of run1 is greater than all the elements in run2.
* The method should be called when run1.length <= run2.length as it uses
* TimSort temporary array to store run1. Use mergeHigh if run1.length >
* run2.length.
*
* @param start1 - First element in run1.
* @param length1 - Length of run1.
* @param start2 - First element in run2.
* @param length2 - Length of run2.
*/
mergeLow(start1: number, length1: number, start2: number, length2: number) {
const compare = this.compare;
const array = this.array;
const tmp = this.tmp;
let i = 0;
for (i = 0; i < length1; i++) {
tmp[i] = array[start1 + i];
}
let cursor1 = 0;
let cursor2 = start2;
let dest = start1;
array[dest++] = array[cursor2++];
if (--length2 === 0) {
for (i = 0; i < length1; i++) {
array[dest + i] = tmp[cursor1 + i];
}
return;
}
if (length1 === 1) {
for (i = 0; i < length2; i++) {
array[dest + i] = array[cursor2 + i];
}
array[dest + length2] = tmp[cursor1];
return;
}
let minGallop = this.minGallop;
while (true) {
let count1 = 0;
let count2 = 0;
let exit = false;
do {
if (compare(array[cursor2], tmp[cursor1]) < 0) {
array[dest++] = array[cursor2++];
count2++;
count1 = 0;
if (--length2 === 0) {
exit = true;
break;
}
} else {
array[dest++] = tmp[cursor1++];
count1++;
count2 = 0;
if (--length1 === 1) {
exit = true;
break;
}
}
} while ((count1 | count2) < minGallop);
if (exit) {
break;
}
do {
count1 = gallopRight(array[cursor2], tmp, cursor1, length1, 0, compare);
if (count1 !== 0) {
for (i = 0; i < count1; i++) {
array[dest + i] = tmp[cursor1 + i];
}
dest += count1;
cursor1 += count1;
length1 -= count1;
if (length1 <= 1) {
exit = true;
break;
}
}
array[dest++] = array[cursor2++];
if (--length2 === 0) {
exit = true;
break;
}
count2 = gallopLeft(tmp[cursor1], array, cursor2, length2, 0, compare);
if (count2 !== 0) {
for (i = 0; i < count2; i++) {
array[dest + i] = array[cursor2 + i];
}
dest += count2;
cursor2 += count2;
length2 -= count2;
if (length2 === 0) {
exit = true;
break;
}
}
array[dest++] = tmp[cursor1++];
if (--length1 === 1) {
exit = true;
break;
}
minGallop--;
} while (count1 >= DEFAULT_MIN_GALLOPING || count2 >= DEFAULT_MIN_GALLOPING);
if (exit) {
break;
}
if (minGallop < 0) {
minGallop = 0;
}
minGallop += 2;
}
this.minGallop = minGallop;
if (minGallop < 1) {
this.minGallop = 1;
}
if (length1 === 1) {
for (i = 0; i < length2; i++) {
array[dest + i] = array[cursor2 + i];
}
array[dest + length2] = tmp[cursor1];
} else if (length1 === 0) {
// do nothing
} else {
for (i = 0; i < length1; i++) {
array[dest + i] = tmp[cursor1 + i];
}
}
}
/**
* Merge two adjacent runs in a stable way. The runs must be such that the
* first element of run1 is bigger than the first element in run2 and the
* last element of run1 is greater than all the elements in run2.
* The method should be called when run1.length > run2.length as it uses
* TimSort temporary array to store run2. Use mergeLow if run1.length <=
* run2.length.
*
* @param start1 - First element in run1.
* @param length1 - Length of run1.
* @param start2 - First element in run2.
* @param length2 - Length of run2.
*/
mergeHigh(start1: number, length1: number, start2: number, length2: number) {
const compare = this.compare;
const array = this.array;
const tmp = this.tmp;
let i = 0;
for (i = 0; i < length2; i++) {
tmp[i] = array[start2 + i];
}
let cursor1 = start1 + length1 - 1;
let cursor2 = length2 - 1;
let dest = start2 + length2 - 1;
let customCursor = 0;
let customDest = 0;
array[dest--] = array[cursor1--];
if (--length1 === 0) {
customCursor = dest - (length2 - 1);
for (i = 0; i < length2; i++) {
array[customCursor + i] = tmp[i];
}
return;
}
if (length2 === 1) {
dest -= length1;
cursor1 -= length1;
customDest = dest + 1;
customCursor = cursor1 + 1;
for (i = length1 - 1; i >= 0; i--) {
array[customDest + i] = array[customCursor + i];
}
array[dest] = tmp[cursor2];
return;
}
let minGallop = this.minGallop;
while (true) {
let count1 = 0;
let count2 = 0;
let exit = false;
do {
if (compare(tmp[cursor2], array[cursor1]) < 0) {
array[dest--] = array[cursor1--];
count1++;
count2 = 0;
if (--length1 === 0) {
exit = true;
break;
}
} else {
array[dest--] = tmp[cursor2--];
count2++;
count1 = 0;
if (--length2 === 1) {
exit = true;
break;
}
}
} while ((count1 | count2) < minGallop);
if (exit) {
break;
}
do {
count1 = length1 - gallopRight(tmp[cursor2], array, start1, length1, length1 - 1, compare);
if (count1 !== 0) {
dest -= count1;
cursor1 -= count1;
length1 -= count1;
customDest = dest + 1;
customCursor = cursor1 + 1;
for (i = count1 - 1; i >= 0; i--) {
array[customDest + i] = array[customCursor + i];
}
if (length1 === 0) {
exit = true;
break;
}
}
array[dest--] = tmp[cursor2--];
if (--length2 === 1) {
exit = true;
break;
}
count2 = length2 - gallopLeft(array[cursor1], tmp, 0, length2, length2 - 1, compare);
if (count2 !== 0) {
dest -= count2;
cursor2 -= count2;
length2 -= count2;
customDest = dest + 1;
customCursor = cursor2 + 1;
for (i = 0; i < count2; i++) {
array[customDest + i] = tmp[customCursor + i];
}
if (length2 <= 1) {
exit = true;
break;
}
}
array[dest--] = array[cursor1--];
if (--length1 === 0) {
exit = true;
break;
}
minGallop--;
} while (count1 >= DEFAULT_MIN_GALLOPING || count2 >= DEFAULT_MIN_GALLOPING);
if (exit) {
break;
}
if (minGallop < 0) {
minGallop = 0;
}
minGallop += 2;
}
this.minGallop = minGallop;
if (minGallop < 1) {
this.minGallop = 1;
}
if (length2 === 1) {
dest -= length1;
cursor1 -= length1;
customDest = dest + 1;
customCursor = cursor1 + 1;
for (i = length1 - 1; i >= 0; i--) {
array[customDest + i] = array[customCursor + i];
}
array[dest] = tmp[cursor2];
} else if (length2 === 0) {
// do nothing
} else {
customCursor = dest - (length2 - 1);
for (i = 0; i < length2; i++) {
array[customCursor + i] = tmp[i];
}
}
}
}
/**
* Sort an array in the range [lo, hi) using TimSort.
*
* @param array - The array to sort.
* @param compare - Item comparison function. Default is
* alphabetical
* @param lo - First element in the range (inclusive).
* @param hi - Last element in the range.
* comparator.
*/
export function sort<T>(array: T[], compare: Comparator<T> | undefined = alphabeticalCompare, lo = 0, hi: number = array.length): T[] {
// if (!Array.isArray(array)) {
// throw new TypeError('Can only sort arrays');
// }
/*
* Handle the case where a comparison function is not provided. We do
* lexicographic sorting
*/
if (typeof compare !== 'function') {
hi = lo;
lo = compare;
compare = alphabeticalCompare;
}
let remaining = hi - lo;
// The array is already sorted
if (remaining < 2) {
return array;
}
let runLength = 0;
// On small arrays binary sort can be used directly
if (remaining < DEFAULT_MIN_MERGE) {
runLength = makeAscendingRun(array, lo, hi, compare);
binaryInsertionSort(array, lo, hi, lo + runLength, compare);
return array;
}
const ts = new TimSort(array, compare);
const minRun = minRunLength(remaining);
do {
runLength = makeAscendingRun(array, lo, hi, compare);
if (runLength < minRun) {
let force = remaining;
if (force > minRun) {
force = minRun;
}
binaryInsertionSort(array, lo, lo + force, lo + runLength, compare);
runLength = force;
}
// Push new run and merge if necessary
ts.pushRun(lo, runLength);
ts.mergeRuns();
// Go find next run
remaining -= runLength;
lo += runLength;
} while (remaining !== 0);
// Force merging of remaining runs
ts.forceMergeRuns();
return array;
}
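
A hedged aside, not part of the commit: ES2019 requires `Array.prototype.sort` to be stable, and V8 has implemented it with TimSort since Node 11, so the vendored copy deleted above is redundant. At the call sites touched in this commit the replacement is mechanical:

```ts
// Drop-in shim matching how the deleted helper was used in this repo
// (always a comparator over the whole array, sorted in place, array returned).
export function sort<T>(array: T[], compare: (a: T, b: T) => number): T[] {
  return array.sort(compare);
}

// before: sort(inputs, sorter);   after: inputs.sort(sorter);
```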

View File

@ -5,8 +5,6 @@
import { fastStringArrayJoin } from './misc'; import { fastStringArrayJoin } from './misc';
import { inspect } from 'util'; import { inspect } from 'util';
// const { Error, Bun, JSON, Symbol } = globalThis;
const noop = () => { /** noop */ }; const noop = () => { /** noop */ };
type TrieNode = [ type TrieNode = [

View File

@ -2,12 +2,7 @@
module.exports = require('eslint-config-sukka').sukka({ module.exports = require('eslint-config-sukka').sukka({
js: { js: {
-    disableNoConsoleInCLI: ['Build/**'],
-    env: {
-      customGlobals: {
-        Bun: 'readonly'
-      }
-    }
+    disableNoConsoleInCLI: ['Build/**']
}, },
node: true, node: true,
ts: true ts: true

View File

@ -7,11 +7,12 @@
"type": "git", "type": "git",
"url": "git+https://github.com/SukkaW/Surge.git" "url": "git+https://github.com/SukkaW/Surge.git"
}, },
"type": "commonjs",
"scripts": { "scripts": {
"build": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true node -r @swc-node/register ./Build/index.ts", "node": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true node -r @swc-node/register",
"build-bun": "bun ./Build/index.ts", "dexnode": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true dexnode -r @swc-node/register",
"build-bun-stream": "ENABLE_TEXT_LINE_STREAM=true bun ./Build/index.ts", "build": "pnpm run node ./Build/index.ts",
"build-profile": "SWCRC=true ENABLE_TEXT_LINE_STREAM=true dexnode -r @swc-node/register ./Build/index.ts", "build-profile": "pnpm run dexnode -r @swc-node/register ./Build/index.ts",
"lint": "eslint --format=sukka ." "lint": "eslint --format=sukka ."
}, },
"author": "", "author": "",
@ -43,10 +44,8 @@
"@swc/core": "^1.7.0", "@swc/core": "^1.7.0",
"@types/async-retry": "^1.4.8", "@types/async-retry": "^1.4.8",
"@types/better-sqlite3": "^7.6.11", "@types/better-sqlite3": "^7.6.11",
"@types/bun": "^1.1.6",
"@types/punycode": "^2.1.4", "@types/punycode": "^2.1.4",
"@types/tar-stream": "^3.1.3", "@types/tar-stream": "^3.1.3",
"bun-types": "^1.1.20",
"eslint": "^9.7.0", "eslint": "^9.7.0",
"eslint-config-sukka": "^6.1.6", "eslint-config-sukka": "^6.1.6",
"eslint-formatter-sukka": "^6.1.6", "eslint-formatter-sukka": "^6.1.6",

pnpm-lock.yaml generated
View File

@ -84,18 +84,12 @@ importers:
'@types/better-sqlite3': '@types/better-sqlite3':
specifier: ^7.6.11 specifier: ^7.6.11
version: 7.6.11 version: 7.6.11
'@types/bun':
specifier: ^1.1.6
version: 1.1.6
'@types/punycode': '@types/punycode':
specifier: ^2.1.4 specifier: ^2.1.4
version: 2.1.4 version: 2.1.4
'@types/tar-stream': '@types/tar-stream':
specifier: ^3.1.3 specifier: ^3.1.3
version: 3.1.3 version: 3.1.3
bun-types:
specifier: ^1.1.20
version: 1.1.20
eslint: eslint:
specifier: ^9.7.0 specifier: ^9.7.0
version: 9.7.0 version: 9.7.0
@ -394,9 +388,6 @@ packages:
'@types/better-sqlite3@7.6.11': '@types/better-sqlite3@7.6.11':
resolution: {integrity: sha512-i8KcD3PgGtGBLl3+mMYA8PdKkButvPyARxA7IQAd6qeslht13qxb1zzO8dRCtE7U3IoJS782zDBAeoKiM695kg==} resolution: {integrity: sha512-i8KcD3PgGtGBLl3+mMYA8PdKkButvPyARxA7IQAd6qeslht13qxb1zzO8dRCtE7U3IoJS782zDBAeoKiM695kg==}
'@types/bun@1.1.6':
resolution: {integrity: sha512-uJgKjTdX0GkWEHZzQzFsJkWp5+43ZS7HC8sZPFnOwnSo1AsNl2q9o2bFeS23disNDqbggEgyFkKCHl/w8iZsMA==}
'@types/chrome@0.0.268': '@types/chrome@0.0.268':
resolution: {integrity: sha512-7N1QH9buudSJ7sI8Pe4mBHJr5oZ48s0hcanI9w3wgijAlv1OZNUZve9JR4x42dn5lJ5Sm87V1JNfnoh10EnQlA==} resolution: {integrity: sha512-7N1QH9buudSJ7sI8Pe4mBHJr5oZ48s0hcanI9w3wgijAlv1OZNUZve9JR4x42dn5lJ5Sm87V1JNfnoh10EnQlA==}
@ -421,9 +412,6 @@ packages:
'@types/json-schema@7.0.15': '@types/json-schema@7.0.15':
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
'@types/node@20.12.14':
resolution: {integrity: sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg==}
'@types/node@20.14.11': '@types/node@20.14.11':
resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==} resolution: {integrity: sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==}
@ -436,9 +424,6 @@ packages:
'@types/tar-stream@3.1.3': '@types/tar-stream@3.1.3':
resolution: {integrity: sha512-Zbnx4wpkWBMBSu5CytMbrT5ZpMiF55qgM+EpHzR4yIDu7mv52cej8hTkOc6K+LzpkOAbxwn/m7j3iO+/l42YkQ==} resolution: {integrity: sha512-Zbnx4wpkWBMBSu5CytMbrT5ZpMiF55qgM+EpHzR4yIDu7mv52cej8hTkOc6K+LzpkOAbxwn/m7j3iO+/l42YkQ==}
'@types/ws@8.5.11':
resolution: {integrity: sha512-4+q7P5h3SpJxaBft0Dzpbr6lmMaqh0Jr2tbhJZ/luAwvD7ohSCniYkwz/pLxuT2h0EOa6QADgJj1Ko+TzRfZ+w==}
'@typescript-eslint/eslint-plugin@8.0.0-alpha.45': '@typescript-eslint/eslint-plugin@8.0.0-alpha.45':
resolution: {integrity: sha512-h+pGHKWu+i5D6BmzpggG8bDj/fVVhxzQLE2CPsKtH1ab0QvUz+eyT/lIfz0xs8NF/lQS7tmlU5AYnQdKe1yAQw==} resolution: {integrity: sha512-h+pGHKWu+i5D6BmzpggG8bDj/fVVhxzQLE2CPsKtH1ab0QvUz+eyT/lIfz0xs8NF/lQS7tmlU5AYnQdKe1yAQw==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@ -481,10 +466,6 @@ packages:
resolution: {integrity: sha512-a29Ir0EbyKTKHnZWbNsrc/gqfIBqYPwj3F2M+jWE/9bqfEHg0AMtXzkbUkOG6QgEScxh2+Pz9OXe11jHDnHR7A==} resolution: {integrity: sha512-a29Ir0EbyKTKHnZWbNsrc/gqfIBqYPwj3F2M+jWE/9bqfEHg0AMtXzkbUkOG6QgEScxh2+Pz9OXe11jHDnHR7A==}
engines: {node: ^18.18.0 || >=20.0.0} engines: {node: ^18.18.0 || >=20.0.0}
'@typescript-eslint/types@8.0.0-alpha.36':
resolution: {integrity: sha512-D+w5uE8Y83K/P5VQZyKKi4pwTL2YkWOwtQOVJQI38Rp8f3pmY+Jmcps3wkSFSJK8wifTlvoHwwIBf1FsdCW/EA==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
'@typescript-eslint/types@8.0.0-alpha.45': '@typescript-eslint/types@8.0.0-alpha.45':
resolution: {integrity: sha512-yjTlmcSnkFV8IoqE0vinmWo+fl7TjkaGyGX/g9gKN/b2IO8g+AimB7BhilmlBqvZupvo2AfiHqcnZEVhQAXI8w==} resolution: {integrity: sha512-yjTlmcSnkFV8IoqE0vinmWo+fl7TjkaGyGX/g9gKN/b2IO8g+AimB7BhilmlBqvZupvo2AfiHqcnZEVhQAXI8w==}
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@ -605,12 +586,6 @@ packages:
buffer@5.7.1: buffer@5.7.1:
resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==}
bun-types@1.1.17:
resolution: {integrity: sha512-Z4+OplcSd/YZq7ZsrfD00DKJeCwuNY96a1IDJyR73+cTBaFIS7SC6LhpY/W3AMEXO9iYq5NJ58WAwnwL1p5vKg==}
bun-types@1.1.20:
resolution: {integrity: sha512-2u84HciDR3E7Uc0t0AEeXHmQAWe9uzRKTz120D3silIJOQlbGIMJMJiGaM8Yx7nEvMyfV0LfSdkEGnb77AN5AA==}
callsites@3.1.0: callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'} engines: {node: '>=6'}
@ -1670,10 +1645,6 @@ snapshots:
dependencies: dependencies:
'@types/node': 20.14.11 '@types/node': 20.14.11
'@types/bun@1.1.6':
dependencies:
bun-types: 1.1.17
'@types/chrome@0.0.268': '@types/chrome@0.0.268':
dependencies: dependencies:
'@types/filesystem': 0.0.36 '@types/filesystem': 0.0.36
@ -1698,10 +1669,6 @@ snapshots:
'@types/json-schema@7.0.15': {} '@types/json-schema@7.0.15': {}
'@types/node@20.12.14':
dependencies:
undici-types: 5.26.5
'@types/node@20.14.11': '@types/node@20.14.11':
dependencies: dependencies:
undici-types: 5.26.5 undici-types: 5.26.5
@ -1714,10 +1681,6 @@ snapshots:
dependencies: dependencies:
'@types/node': 20.14.11 '@types/node': 20.14.11
'@types/ws@8.5.11':
dependencies:
'@types/node': 20.12.14
'@typescript-eslint/eslint-plugin@8.0.0-alpha.45(@typescript-eslint/parser@8.0.0-alpha.45(eslint@9.7.0)(typescript@5.5.4))(eslint@9.7.0)(typescript@5.5.4)': '@typescript-eslint/eslint-plugin@8.0.0-alpha.45(@typescript-eslint/parser@8.0.0-alpha.45(eslint@9.7.0)(typescript@5.5.4))(eslint@9.7.0)(typescript@5.5.4)':
dependencies: dependencies:
'@eslint-community/regexpp': 4.11.0 '@eslint-community/regexpp': 4.11.0
@ -1773,8 +1736,6 @@ snapshots:
'@typescript-eslint/types@7.17.0': {} '@typescript-eslint/types@7.17.0': {}
'@typescript-eslint/types@8.0.0-alpha.36': {}
'@typescript-eslint/types@8.0.0-alpha.45': {} '@typescript-eslint/types@8.0.0-alpha.45': {}
'@typescript-eslint/typescript-estree@7.17.0(typescript@5.5.4)': '@typescript-eslint/typescript-estree@7.17.0(typescript@5.5.4)':
@ -1794,7 +1755,7 @@ snapshots:
'@typescript-eslint/typescript-estree@8.0.0-alpha.45(typescript@5.5.4)': '@typescript-eslint/typescript-estree@8.0.0-alpha.45(typescript@5.5.4)':
dependencies: dependencies:
'@typescript-eslint/types': 8.0.0-alpha.36 '@typescript-eslint/types': 8.0.0-alpha.45
'@typescript-eslint/visitor-keys': 8.0.0-alpha.45 '@typescript-eslint/visitor-keys': 8.0.0-alpha.45
debug: 4.3.5 debug: 4.3.5
globby: 11.1.0 globby: 11.1.0
@ -1836,7 +1797,7 @@ snapshots:
'@typescript-eslint/visitor-keys@8.0.0-alpha.45': '@typescript-eslint/visitor-keys@8.0.0-alpha.45':
dependencies: dependencies:
'@typescript-eslint/types': 8.0.0-alpha.36 '@typescript-eslint/types': 8.0.0-alpha.45
eslint-visitor-keys: 3.4.3 eslint-visitor-keys: 3.4.3
acorn-jsx@5.3.2(acorn@8.12.1): acorn-jsx@5.3.2(acorn@8.12.1):
@ -1921,16 +1882,6 @@ snapshots:
base64-js: 1.5.1 base64-js: 1.5.1
ieee754: 1.2.1 ieee754: 1.2.1
bun-types@1.1.17:
dependencies:
'@types/node': 20.12.14
'@types/ws': 8.5.11
bun-types@1.1.20:
dependencies:
'@types/node': 20.12.14
'@types/ws': 8.5.11
callsites@3.1.0: {} callsites@3.1.0: {}
chalk@4.1.2: chalk@4.1.2: