mirror of https://github.com/SukkaW/Surge.git (synced 2025-12-14 02:00:37 +08:00)

Perf: faster FIFO impl

parent 58cea58c3c
commit 8851d7fb38
@@ -1,63 +0,0 @@
-class Node<T> {
-  next?: Node<T>;
-
-  constructor(public readonly value: T) {}
-}
-
-export default class FIFO<T> {
-  private head?: Node<T>;
-  private tail?: Node<T>;
-  public $size = 0;
-
-  constructor() {
-    this.clear();
-  }
-
-  enqueue(value: T) {
-    const node = new Node<T>(value);
-
-    if (this.head) {
-      this.tail!.next = node;
-      this.tail = node;
-    } else {
-      this.head = node;
-      this.tail = node;
-    }
-
-    this.$size++;
-  }
-
-  dequeue() {
-    const current = this.head;
-    if (!current) {
-      return;
-    }
-
-    this.head = this.head!.next;
-    this.$size--;
-    return current.value;
-  }
-
-  peek() {
-    return this.head?.value;
-  }
-
-  clear() {
-    this.head = undefined;
-    this.tail = undefined;
-    this.$size = 0;
-  }
-
-  get size() {
-    return this.$size;
-  }
-
-  *[Symbol.iterator]() {
-    let current = this.head;
-
-    while (current) {
-      yield current.value;
-      current = current.next;
-    }
-  }
-}
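The hunk above deletes the repository's hand-rolled linked-list FIFO; the rest of the commit swaps it for the fast-fifo package. A minimal migration sketch (an illustration, not part of the commit), assuming only the fast-fifo surface the hunks below actually exercise — push(), shift() and the length getter; the LegacyQueue adapter name is hypothetical and exists purely to show the old-to-new mapping:

    import FIFO from 'fast-fifo';

    // Adapter mapping the removed class's API onto fast-fifo.
    // Assumes only push/shift/length, as used in the diff below.
    class LegacyQueue<T> {
      private readonly q = new FIFO<T>();

      enqueue(value: T) { this.q.push(value); }            // was FIFO#enqueue
      dequeue(): T | undefined { return this.q.shift(); }  // was FIFO#dequeue
      get size(): number { return this.q.length; }         // was FIFO#size / $size
    }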
@@ -6,7 +6,7 @@ import { fastStringCompare } from './misc';
 import util from 'node:util';
 import { noop } from 'foxts/noop';
 import { fastStringArrayJoin } from 'foxts/fast-string-array-join';
-import FIFO from './fifo';
+import FIFO from 'fast-fifo';

 type TrieNode<Meta = any> = [
   boolean, /** end */
@@ -181,14 +181,14 @@ abstract class Triebase<Meta = any> {
   private static bfsResults: [node: TrieNode | null, suffix: string[]] = [null, []];

   private static bfs<Meta>(this: void, nodeStack: FIFO<TrieNode<Meta>>, suffixStack: FIFO<string[]>) {
-    const node = nodeStack.dequeue()!;
-    const suffix = suffixStack.dequeue()!;
+    const node = nodeStack.shift()!;
+    const suffix = suffixStack.shift()!;

     node[3].forEach((childNode, k) => {
       // Pushing the child node to the stack for next iteration of DFS
-      nodeStack.enqueue(childNode);
+      nodeStack.push(childNode);

-      suffixStack.enqueue([k, ...suffix]);
+      suffixStack.push([k, ...suffix]);
     });

     Triebase.bfsResults[0] = node;
@@ -198,8 +198,8 @@ abstract class Triebase<Meta = any> {
   }

   private static bfsWithSort<Meta>(this: void, nodeStack: FIFO<TrieNode<Meta>>, suffixStack: FIFO<string[]>) {
-    const node = nodeStack.dequeue()!;
-    const suffix = suffixStack.dequeue()!;
+    const node = nodeStack.shift()!;
+    const suffix = suffixStack.shift()!;

     if (node[3].size) {
       const keys = Array.from(node[3].keys()).sort(Triebase.compare);
@@ -209,8 +209,8 @@ abstract class Triebase<Meta = any> {
         const childNode = node[3].get(key)!;

         // Pushing the child node to the stack for next iteration of DFS
-        nodeStack.enqueue(childNode);
-        suffixStack.enqueue([key, ...suffix]);
+        nodeStack.push(childNode);
+        suffixStack.push([key, ...suffix]);
       }
     }

@@ -229,11 +229,11 @@ abstract class Triebase<Meta = any> {
     const bfsImpl = withSort ? Triebase.bfsWithSort : Triebase.bfs;

     const nodeStack = new FIFO<TrieNode<Meta>>();
-    nodeStack.enqueue(initialNode);
+    nodeStack.push(initialNode);

     // Resolving initial string (begin the start of the stack)
     const suffixStack = new FIFO<string[]>();
-    suffixStack.enqueue(initialSuffix);
+    suffixStack.push(initialSuffix);

     let node: TrieNode<Meta> = initialNode;
     let r;
@@ -247,7 +247,7 @@ abstract class Triebase<Meta = any> {
       if (node[0]) {
         onMatches(suffix, node[1], node[4]);
       }
-    } while (nodeStack.size);
+    } while (nodeStack.length);
   };

   static compare(this: void, a: string, b: string) {
@@ -261,17 +261,17 @@ abstract class Triebase<Meta = any> {
     initialSuffix: string[] = []
   ) {
     const nodeStack = new FIFO<TrieNode<Meta>>();
-    nodeStack.enqueue(initialNode);
+    nodeStack.push(initialNode);

     // Resolving initial string (begin the start of the stack)
     const suffixStack = new FIFO<string[]>();
-    suffixStack.enqueue(initialSuffix);
+    suffixStack.push(initialSuffix);

     let node: TrieNode<Meta> = initialNode;

     do {
-      node = nodeStack.dequeue()!;
-      const suffix = suffixStack.dequeue()!;
+      node = nodeStack.shift()!;
+      const suffix = suffixStack.shift()!;

       if (node[3].size) {
         const keys = Array.from(node[3].keys()).sort(Triebase.compare);
@@ -281,8 +281,8 @@ abstract class Triebase<Meta = any> {
           const childNode = node[3].get(key)!;

           // Pushing the child node to the stack for next iteration of DFS
-          nodeStack.enqueue(childNode);
-          suffixStack.enqueue([key, ...suffix]);
+          nodeStack.push(childNode);
+          suffixStack.push([key, ...suffix]);
         }
       }

@@ -290,7 +290,7 @@ abstract class Triebase<Meta = any> {
       if (node[0]) {
         onMatches(suffix, node[1], node[4]);
       }
-    } while (nodeStack.size);
+    } while (nodeStack.length);
   };

   protected getSingleChildLeaf(tokens: string[]): FindSingleChildLeafResult<Meta> | null {
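Across these hunks the change is a pure API rename — enqueue() becomes push(), dequeue() becomes shift(), and the size getter becomes fast-fifo's length property — while the traversal logic itself is untouched. A condensed sketch of the resulting loop shape (the simplified node type and the visit callback are illustrative, not taken from the commit):

    import FIFO from 'fast-fifo';

    // Simplified stand-in for the trie's node type; the real TrieNode is a tuple.
    interface TreeNode { children: Map<string, TreeNode> }

    function walk(root: TreeNode, visit: (node: TreeNode, suffix: string[]) => void) {
      const nodeStack = new FIFO<TreeNode>();
      const suffixStack = new FIFO<string[]>();
      nodeStack.push(root);        // was nodeStack.enqueue(initialNode)
      suffixStack.push([]);

      do {
        const node = nodeStack.shift()!;       // was dequeue()
        const suffix = suffixStack.shift()!;
        node.children.forEach((child, key) => {
          nodeStack.push(child);
          suffixStack.push([key, ...suffix]);
        });
        visit(node, suffix);
      } while (nodeStack.length);              // was nodeStack.size
    }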
@@ -31,6 +31,7 @@
     "dns2": "^2.1.0",
     "escape-string-regexp-node": "^1.0.2",
     "fast-cidr-tools": "^0.3.1",
+    "fast-fifo": "^1.3.2",
     "fdir": "^6.4.2",
     "foxts": "1.0.7",
     "hash-wasm": "^4.12.0",
@@ -55,6 +56,7 @@
     "@types/better-sqlite3": "^7.6.12",
     "@types/cacache": "^17.0.2",
     "@types/dns2": "^2.0.9",
+    "@types/fast-fifo": "^1.3.0",
     "@types/make-fetch-happen": "^10.0.4",
     "@types/mocha": "^10.0.10",
     "@types/node": "^22.10.2",
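The manifest hunks above add fast-fifo ^1.3.2 as a runtime dependency and @types/fast-fifo ^1.3.0 to the dev dependencies — presumably because fast-fifo does not bundle its own type declarations, so the generic FIFO<TrieNode<Meta>> signature used in the trie comes from the DefinitelyTyped package. A tiny illustrative sketch of the typed usage (the element type and values are made up):

    import FIFO from 'fast-fifo';

    // With @types/fast-fifo installed, the queue is generic over its element type.
    const queue = new FIFO<string>();
    queue.push('example.com');
    const next = queue.shift(); // undefined once the queue has been drained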
pnpm-lock.yaml (generated, 11 additions)

@@ -44,6 +44,9 @@ importers:
       fast-cidr-tools:
         specifier: ^0.3.1
         version: 0.3.1
+      fast-fifo:
+        specifier: ^1.3.2
+        version: 1.3.2
       fdir:
         specifier: ^6.4.2
         version: 6.4.2(picomatch@4.0.2)
@@ -111,6 +114,9 @@ importers:
       '@types/dns2':
         specifier: ^2.0.9
         version: 2.0.9
+      '@types/fast-fifo':
+        specifier: ^1.3.0
+        version: 1.3.0
       '@types/make-fetch-happen':
         specifier: ^10.0.4
         version: 10.0.4
@@ -513,6 +519,9 @@ packages:
   '@types/estree@1.0.6':
     resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==}

+  '@types/fast-fifo@1.3.0':
+    resolution: {integrity: sha512-xTVvla2QX5ruNmVUqCFc++xm42xl6RBaroppSleSczvM2rAwClr88MbnrgDGLIStt4RCVegeKO4z2HM5AHFOng==}
+
   '@types/istanbul-lib-coverage@2.0.6':
     resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==}

@@ -2252,6 +2261,8 @@ snapshots:

   '@types/estree@1.0.6': {}

+  '@types/fast-fifo@1.3.0': {}
+
   '@types/istanbul-lib-coverage@2.0.6': {}

   '@types/istanbul-lib-report@3.0.3':