Chore: refactor fetch text by line & load previous speedtest set

SukkaW 2023-12-31 00:04:17 +08:00
parent a4458ee367
commit 48b3c4538e
2 changed files with 25 additions and 27 deletions


@@ -10,6 +10,8 @@ import { fetchWithRetry } from './lib/fetch-retry';
 import { SHARED_DESCRIPTION } from './lib/constants';
 import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
 import picocolors from 'picocolors';
+import { fetchRemoteTextByLine } from './lib/fetch-text-by-line';
+import { processLine } from './lib/process-line';
 
 const s = new Sema(2);
@@ -65,6 +67,7 @@ const querySpeedtestApi = async (keyword: string): Promise<Array<string | null>>
 };
 
 export const buildSpeedtestDomainSet = task(import.meta.path, async () => {
+  // Predefined domainset
   /** @type {Set<string>} */
   const domains = new Set<string>([
     '.speedtest.net',
@@ -130,6 +133,14 @@ export const buildSpeedtestDomainSet = task(import.meta.path, async () => {
     '.backend.librespeed.org'
   ]);
 
+  // Download previous speedtest domainset
+  for await (const l of await fetchRemoteTextByLine('https://ruleset.skk.moe/List/domainset/speedtest.conf')) {
+    const line = processLine(l);
+    if (line) {
+      domains.add(line);
+    }
+  }
+
   let timer;
 
   const pMap = ([
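
The new block above folds the previously published ruleset back into the in-memory set, so rules that the speedtest.net keyword API no longer surfaces are not dropped between builds. Below is a minimal, self-contained sketch of that merge step; it uses a one-shot fetch and a hypothetical keepLine stand-in instead of the project's fetchRemoteTextByLine/processLine helpers (whose exact behaviour is assumed here: trim, skip blank lines and '#' comments). Only the ruleset URL is taken from the diff.

// Sketch only: stand-ins for the project's helpers, not the real implementations.
const PREVIOUS_RULESET = 'https://ruleset.skk.moe/List/domainset/speedtest.conf';

// Assumed behaviour of the line filter: trim, drop blank lines and '#' comments.
const keepLine = (raw: string): string | null => {
  const line = raw.trim();
  if (line === '' || line.startsWith('#')) return null;
  return line;
};

async function mergePreviousRuleset(domains: Set<string>): Promise<void> {
  const resp = await fetch(PREVIOUS_RULESET);
  if (!resp.ok) throw new Error(`Failed to download previous ruleset: ${resp.status}`);
  for (const raw of (await resp.text()).split('\n')) {
    const line = keepLine(raw);
    if (line) domains.add(line);
  }
}

// Usage: start from the predefined seed set, then fold the previous build back in.
const domains = new Set<string>(['.speedtest.net']);
await mergePreviousRuleset(domains);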


@@ -23,16 +23,10 @@ import { fetchWithRetry, defaultRequestInit } from './fetch-retry';
 const decoder = new TextDecoder('utf-8');
 
-export async function *readFileByLine(file: string | URL | BunFile): AsyncGenerator<string> {
-  if (typeof file === 'string') {
-    file = Bun.file(file);
-  } else if (!('writer' in file)) {
-    file = Bun.file(file);
-  }
-
+async function *createTextLineAsyncGeneratorFromStreamSource(stream: ReadableStream<Uint8Array>): AsyncGenerator<string> {
   let buf = '';
-  for await (const chunk of file.stream()) {
+  for await (const chunk of stream) {
     const chunkStr = decoder.decode(chunk).replaceAll('\r\n', '\n');
     for (let i = 0, len = chunkStr.length; i < len; i++) {
       const char = chunkStr[i];
@@ -50,7 +44,17 @@ export async function *readFileByLine(file: string | URL | BunFile): AsyncGenerator<string> {
   }
 }
 
-export async function *createReadlineInterfaceFromResponse(resp: Response): AsyncGenerator<string> {
+export function readFileByLine(file: string | URL | BunFile): AsyncGenerator<string> {
+  if (typeof file === 'string') {
+    file = Bun.file(file);
+  } else if (!('writer' in file)) {
+    file = Bun.file(file);
+  }
+
+  return createTextLineAsyncGeneratorFromStreamSource(file.stream());
+}
+
+export function createReadlineInterfaceFromResponse(resp: Response): AsyncGenerator<string> {
   if (!resp.body) {
     throw new Error('Failed to fetch remote text');
   }
@@ -58,24 +62,7 @@ export async function *createReadlineInterfaceFromResponse(resp: Response): AsyncGenerator<string> {
     throw new Error('Body has already been consumed.');
   }
 
-  let buf = '';
-  for await (const chunk of resp.body) {
-    const chunkStr = decoder.decode(chunk).replaceAll('\r\n', '\n');
-    for (let i = 0, len = chunkStr.length; i < len; i++) {
-      const char = chunkStr[i];
-      if (char === '\n') {
-        yield buf;
-        buf = '';
-      } else {
-        buf += char;
-      }
-    }
-  }
-
-  if (buf) {
-    yield buf;
-  }
+  return createTextLineAsyncGeneratorFromStreamSource(resp.body);
 }
 
 export function fetchRemoteTextByLine(url: string | URL) {
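
The hunk is cut off at the opening line of fetchRemoteTextByLine, so its body is not shown here. For orientation, a hedged usage sketch of the two exported helpers, based only on the signatures above and on the for await loop in the first changed file; the local path and the import location are hypothetical.

// Usage sketch, assuming this module is imported from a sibling file.
import { readFileByLine, fetchRemoteTextByLine } from './fetch-text-by-line';

// Local file: readFileByLine returns an AsyncGenerator<string> directly.
for await (const line of readFileByLine('./speedtest.conf')) {
  console.log('local:', line);
}

// Remote text: fetchRemoteTextByLine is awaited first (it performs the fetch),
// then iterated line by line, mirroring the loop in the first changed file.
for await (const line of await fetchRemoteTextByLine('https://ruleset.skk.moe/List/domainset/speedtest.conf')) {
  console.log('remote:', line);
}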