Refactor/Perf: initial span tracer
@@ -4,7 +4,7 @@ import path from 'path';
 import { pipeline } from 'stream/promises';
 import { readFileByLine } from './lib/fetch-text-by-line';
 import { isCI } from 'ci-info';
-import { task } from './lib/trace-runner';
+import { task } from './trace';
 import { defaultRequestInit, fetchWithRetry } from './lib/fetch-retry';
 import tarStream from 'tar-stream';
 import zlib from 'zlib';
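
The only import change is the swap from './lib/trace-runner' to a new './trace' module. As the hunks below show, the new task wrapper now hands its callback a root span exposing traceChild and traceAsyncFn. The './trace' module itself is not part of this diff, so the following is only a minimal sketch of a span tracer with that shape; the class internals, the performance.now() timing, and the report helper are all assumptions. (import.meta.path and import.meta.dir are Bun extensions; this build runs under Bun, per the Bun.file() comment in the diff.)

// Minimal span tracer sketch matching the API this diff relies on
// (task / traceChild / traceAsyncFn). The real './trace' module is not
// shown in this commit, so everything below is an assumption.
import path from 'path';

class Span {
  private readonly children: Span[] = [];
  private start = 0;
  private end = 0;

  constructor(private readonly name: string) {}

  // Nest a named child span under this one.
  traceChild(name: string): Span {
    const child = new Span(name);
    this.children.push(child);
    return child;
  }

  // Time an async unit of work against this span.
  async traceAsyncFn<T>(fn: (span: Span) => T | Promise<T>): Promise<T> {
    this.start = performance.now();
    try {
      return await fn(this);
    } finally {
      this.end = performance.now();
    }
  }

  // Dump the span tree with per-node durations.
  report(indent = 0): void {
    console.log(`${' '.repeat(indent)}${this.name}: ${(this.end - this.start).toFixed(2)}ms`);
    for (const child of this.children) child.report(indent + 2);
  }
}

// Wrap a build task so it runs under a root span named after its file
// and prints the collected timings when it finishes.
export const task = <T>(filePath: string, fn: (span: Span) => Promise<T>) => {
  return async (): Promise<T> => {
    const root = new Span(path.basename(filePath));
    const result = await root.traceAsyncFn(fn);
    root.report();
    return result;
  };
};

Under that assumption, downloadPreviousBuild below receives the root span and opens one child span per phase.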
@@ -13,29 +13,33 @@ import { Readable } from 'stream';
 const IS_READING_BUILD_OUTPUT = 1 << 2;
 const ALL_FILES_EXISTS = 1 << 3;
 
-export const downloadPreviousBuild = task(import.meta.path, async () => {
+export const downloadPreviousBuild = task(import.meta.path, async (span) => {
   const buildOutputList: string[] = [];
 
   let flag = 1 | ALL_FILES_EXISTS;
 
-  for await (const line of readFileByLine(path.resolve(import.meta.dir, '../.gitignore'))) {
-    if (line === '# $ build output') {
-      flag = flag | IS_READING_BUILD_OUTPUT;
-      continue;
-    }
-    if (!(flag & IS_READING_BUILD_OUTPUT)) {
-      continue;
-    }
+  await span
+    .traceChild('read .gitignore')
+    .traceAsyncFn(async () => {
+      for await (const line of readFileByLine(path.resolve(import.meta.dir, '../.gitignore'))) {
+        if (line === '# $ build output') {
+          flag = flag | IS_READING_BUILD_OUTPUT;
+          continue;
+        }
+        if (!(flag & IS_READING_BUILD_OUTPUT)) {
+          continue;
+        }
 
-    buildOutputList.push(line);
+        buildOutputList.push(line);
 
-    if (!isCI) {
-      // Bun.file().exists() doesn't check directory
-      if (!existsSync(path.join(import.meta.dir, '..', line))) {
-        flag = flag & ~ALL_FILES_EXISTS;
-      }
-    }
-  }
+        if (!isCI) {
+          // Bun.file().exists() doesn't check directory
+          if (!existsSync(path.join(import.meta.dir, '..', line))) {
+            flag = flag & ~ALL_FILES_EXISTS;
+          }
+        }
+      }
+    });
 
   if (isCI) {
     flag = flag & ~ALL_FILES_EXISTS;
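
Worth noting in the hunk above: the task keeps its state in a single integer bit field. 1 << 2 and 1 << 3 pick distinct bits, flag | BIT sets one, flag & ~BIT clears one, and flag & BIT tests one. In isolation:

// The same bit-flag bookkeeping as in the diff, shown in isolation.
const IS_READING_BUILD_OUTPUT = 1 << 2; // 0b0100
const ALL_FILES_EXISTS = 1 << 3;        // 0b1000

let flag = 1 | ALL_FILES_EXISTS;        // 0b1001: assume all files exist until proven otherwise

flag = flag | IS_READING_BUILD_OUTPUT;  // set a bit   -> 0b1101
console.log((flag & IS_READING_BUILD_OUTPUT) !== 0); // test -> true

flag = flag & ~ALL_FILES_EXISTS;        // clear a bit -> 0b0101
console.log((flag & ALL_FILES_EXISTS) !== 0);        // test -> false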
@@ -48,42 +52,46 @@ export const downloadPreviousBuild = task(import.meta.path, async () => {
 
   const filesList = buildOutputList.map(f => path.join('ruleset.skk.moe-master', f));
 
-  const resp = await fetchWithRetry('https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master', defaultRequestInit);
+  return span
+    .traceChild('download & extract previous build')
+    .traceAsyncFn(async () => {
+      const resp = await fetchWithRetry('https://codeload.github.com/sukkalab/ruleset.skk.moe/tar.gz/master', defaultRequestInit);
 
-  if (!resp.body) {
-    throw new Error('Download previous build failed! No body found');
-  }
+      if (!resp.body) {
+        throw new Error('Download previous build failed! No body found');
+      }
 
-  const extract = tarStream.extract();
-  const gunzip = zlib.createGunzip();
-  pipeline(
-    Readable.fromWeb(resp.body) as any,
-    gunzip,
-    extract
-  );
+      const extract = tarStream.extract();
+      const gunzip = zlib.createGunzip();
+      pipeline(
+        Readable.fromWeb(resp.body) as any,
+        gunzip,
+        extract
+      );
 
-  const pathPrefix = `ruleset.skk.moe-master${path.sep}`;
+      const pathPrefix = `ruleset.skk.moe-master${path.sep}`;
 
-  for await (const entry of extract) {
-    if (entry.header.type !== 'file') {
-      entry.resume(); // Drain the entry
-      continue;
-    }
-    // filter entry
-    if (!filesList.some(f => entry.header.name.startsWith(f))) {
-      entry.resume(); // Drain the entry
-      continue;
-    }
+      for await (const entry of extract) {
+        if (entry.header.type !== 'file') {
+          entry.resume(); // Drain the entry
+          continue;
+        }
+        // filter entry
+        if (!filesList.some(f => entry.header.name.startsWith(f))) {
+          entry.resume(); // Drain the entry
+          continue;
+        }
 
-    const relativeEntryPath = entry.header.name.replace(pathPrefix, '');
-    const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);
+        const relativeEntryPath = entry.header.name.replace(pathPrefix, '');
+        const targetPath = path.join(import.meta.dir, '..', relativeEntryPath);
 
-    await mkdir(path.dirname(targetPath), { recursive: true });
-    await pipeline(
-      entry as any,
-      createWriteStream(targetPath)
-    );
-  }
+        await mkdir(path.dirname(targetPath), { recursive: true });
+        await pipeline(
+          entry as any,
+          createWriteStream(targetPath)
+        );
+      }
+    });
 });
 
 if (import.meta.main) {
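
One design choice in the second traced phase deserves a remark: the gunzip/untar pipeline(...) is deliberately not awaited. It is the producer, feeding extract concurrently while the for await loop drains entries (tar-stream's extract stream is async-iterable, as the diff itself relies on); awaiting it before consuming would deadlock, since nothing would be reading the entries yet. Below is a distilled, standalone sketch of the pattern — downloadTarball, its parameters, and the trailing error check are illustrative additions; only the stream wiring mirrors the diff.

// Standalone sketch of the streaming download-and-extract pattern above.
// The helper name, parameters, and error handling are hypothetical.
import { createWriteStream } from 'fs';
import { mkdir } from 'fs/promises';
import path from 'path';
import { Readable } from 'stream';
import { pipeline } from 'stream/promises';
import tarStream from 'tar-stream';
import zlib from 'zlib';

async function downloadTarball(url: string, dest: string, prefix: string) {
  const resp = await fetch(url);
  if (!resp.body) throw new Error('Download failed! No body found');

  const extract = tarStream.extract();

  // Producer: not awaited here on purpose. It fills `extract` while the
  // loop below drains it; awaiting before consuming would deadlock.
  const producer = pipeline(
    Readable.fromWeb(resp.body) as any,
    zlib.createGunzip(),
    extract
  );

  for await (const entry of extract) {
    // Every entry must be read or resumed, or the tarball stalls.
    if (entry.header.type !== 'file' || !entry.header.name.startsWith(prefix)) {
      entry.resume();
      continue;
    }
    const targetPath = path.join(dest, entry.header.name.slice(prefix.length));
    await mkdir(path.dirname(targetPath), { recursive: true });
    await pipeline(entry as any, createWriteStream(targetPath));
  }

  // Unlike the diff, keep the producer promise around and await it at the
  // end so any download or gunzip error surfaces instead of going unhandled.
  await producer;
}

The slice(prefix.length) strip is equivalent to the diff's replace(pathPrefix, '') here, since the filter already guarantees the entry name starts with the prefix.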