Surge_by_SukkaW/Dist/chunks/fetch-text-by-line.YY5b5610.cjs
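
// Bundled CommonJS chunk: lazily-initialized helpers for reading text line by
// line, from local files (node:readline) and from remote responses (web streams).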
'use strict';

const require$$1 = require('node:fs');
const require$$1$1 = require('node:readline');
const require$$0 = require('node:stream/web');
const fetchRetry = require('./fetch-retry.D06uBUaW.cjs');
const require$$6 = require('foxts/guard');

var processLine = {};
var hasRequiredProcessLine;
function requireProcessLine () {
if (hasRequiredProcessLine) return processLine;
hasRequiredProcessLine = 1;
(function (exports) {
Object.defineProperty(exports, "__esModule", {
value: true
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
ProcessLineStream: function() {
return ProcessLineStream;
},
processLine: function() {
return processLine;
}
});
const _web = require$$0;
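/**
 * Normalize a single filter/ruleset line.
 *
 * Returns the trimmed line, or `null` when the line should be dropped:
 * empty lines, `!` comments, `//` comments, plain `#` comments and the
 * `#################### EOF ####################` marker. AdGuard cosmetic
 * rules starting with `##` (e.g. `##.class`, `###id`) are kept.
 */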
function processLine(line) {
const trimmed = line.trim();
if (trimmed.length === 0) {
return null;
}
const line_0 = trimmed.charCodeAt(0);
if (
// line_0 === 32 /** [space] */
// || line_0 === 13 /** \r */
// || line_0 === 10 /** \n */
line_0 === 33 /** ! */
|| (line_0 === 47 /** / */ && trimmed.charCodeAt(1) === 47 /** / */ )
) {
return null;
}
if (line_0 === 35 /** # */ ) {
if (trimmed.charCodeAt(1) !== 35 /** # */ ) {
// # Comment
return null;
}
if (trimmed.charCodeAt(2) === 35 /** # */ && trimmed.charCodeAt(3) === 35) {
// ################## EOF ##################
return null;
}
/**
* AdGuard Filter can be:
*
* ##.class
* ##tag.class
* ###id
*/
}
return trimmed;
}
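/**
 * Web TransformStream wrapper around `processLine`: enqueues each normalized
 * line and silently drops the lines that `processLine` rejects.
 */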
class ProcessLineStream extends _web.TransformStream {
// private __buf = '';
constructor(){
super({
transform (l, controller) {
const line = processLine(l);
if (line) {
controller.enqueue(line);
}
}
});
}
}

// export class ProcessLineNodeStream extends Transform {
// _transform(chunk: string, encoding: BufferEncoding, callback: TransformCallback) {
// // Convert chunk to string and then to uppercase
// const upperCased = chunk.toUpperCase();
// // Push transformed data to readable side
// this.push(upperCased);
// // Call callback when done
// callback();
// }
// }
} (processLine));
return processLine;
}

var fetchTextByLine = {};
var textLineTransformStream = {};
var hasRequiredTextLineTransformStream;
function requireTextLineTransformStream () {
if (hasRequiredTextLineTransformStream) return textLineTransformStream;
hasRequiredTextLineTransformStream = 1;
(function (exports) {
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "TextLineStream", {
enumerable: true,
get: function() {
return TextLineStream;
}
});
const _web = require$$0;
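/**
 * Web TransformStream that splits incoming text chunks into individual lines.
 * `\n` and `\r\n` terminators are always handled; with `allowCR: true`, bare
 * `\r` terminators split lines too. A trailing partial line is buffered until
 * the next chunk and flushed at stream end.
 *
 * Minimal usage sketch (`byteStream` is any web ReadableStream of bytes; this
 * mirrors createReadlineInterfaceFromResponse below):
 *
 *   byteStream
 *     .pipeThrough(new TextDecoderStream())
 *     .pipeThrough(new TextLineStream());
 */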
class TextLineStream extends _web.TransformStream {
// private __buf = '';
constructor({ allowCR = false } = {}){
let __buf = '';
let chunkIndex = 0;
super({
transform (chunk, controller) {
chunk = __buf + chunk;
chunkIndex = 0;
for(;;){
const lfIndex = chunk.indexOf('\n', chunkIndex);
if (allowCR) {
const crIndex = chunk.indexOf('\r', chunkIndex);
if (crIndex !== -1 && crIndex !== chunk.length - 1 && (lfIndex === -1 || lfIndex - 1 > crIndex)) {
controller.enqueue(chunk.slice(chunkIndex, crIndex));
chunkIndex = crIndex + 1;
continue;
}
}
if (lfIndex === -1) {
break;
}
// enqueue current line, and loop again to find next line
let crOrLfIndex = lfIndex;
if (chunk[lfIndex - 1] === '\r') {
crOrLfIndex--;
}
controller.enqueue(chunk.slice(chunkIndex, crOrLfIndex));
chunkIndex = lfIndex + 1;
continue;
}
__buf = chunk.slice(chunkIndex);
},
flush (controller) {
if (__buf.length > 0) {
// eslint-disable-next-line sukka/string/prefer-string-starts-ends-with -- performance
if (allowCR && __buf[__buf.length - 1] === '\r') {
controller.enqueue(__buf.slice(0, -1));
} else {
controller.enqueue(__buf);
}
}
}
});
}
}
} (textLineTransformStream));
return textLineTransformStream;
}

var hasRequiredFetchTextByLine;
function requireFetchTextByLine () {
if (hasRequiredFetchTextByLine) return fetchTextByLine;
hasRequiredFetchTextByLine = 1;
(function (exports) {
Object.defineProperty(exports, "__esModule", {
value: true
});
function _export(target, all) {
for(var name in all)Object.defineProperty(target, name, {
enumerable: true,
get: all[name]
});
}
_export(exports, {
createReadlineInterfaceFromResponse: function() {
return createReadlineInterfaceFromResponse;
},
fetchRemoteTextByLine: function() {
return fetchRemoteTextByLine;
},
readFileByLine: function() {
return readFileByLine;
},
readFileIntoProcessedArray: function() {
return readFileIntoProcessedArray;
}
});
const _nodefs = /*#__PURE__*/ _interop_require_default(require$$1);
const _nodereadline = /*#__PURE__*/ _interop_require_default(require$$1$1);
const _textlinetransformstream = /*@__PURE__*/ requireTextLineTransformStream();
const _web = require$$0;
const _processline = /*@__PURE__*/ requireProcessLine();
const _fetchretry = /*@__PURE__*/ fetchRetry.r();
const _guard = require$$6;
function _interop_require_default(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
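/**
 * Read a local file line by line using `node:readline` over a file read
 * stream; `crlfDelay: Infinity` treats `\r\n` as a single line break.
 */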
function readFileByLine(file) {
return _nodereadline.default.createInterface({
input: _nodefs.default.createReadStream(file /* , { encoding: 'utf-8' } */ ),
crlfDelay: Infinity
});
}
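/**
 * Turn a fetch Response into a stream of text lines:
 * body -> TextDecoderStream -> TextLineStream, optionally piped through
 * ProcessLineStream when `processLine` is true. Throws if the body is
 * missing, already consumed, or not a web ReadableStream.
 */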
const createReadlineInterfaceFromResponse = (resp, processLine = false)=>{
(0, _guard.invariant)(resp.body, 'Failed to fetch remote text');
if ('bodyUsed' in resp && resp.bodyUsed) {
throw new Error('Body has already been consumed.');
}
let webStream;
if ('pipeThrough' in resp.body) {
webStream = resp.body;
} else {
throw new TypeError('Invalid response body!');
}
const resultStream = webStream.pipeThrough(new _web.TextDecoderStream()).pipeThrough(new _textlinetransformstream.TextLineStream());
if (processLine) {
return resultStream.pipeThrough(new _processline.ProcessLineStream());
}
return resultStream;
};
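/**
 * Fetch a remote text resource via the shared fetch-retry chunk's `$$fetch`
 * and expose the response as a stream of lines.
 */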
function fetchRemoteTextByLine(url, processLine = false) {
return (0, _fetchretry.$$fetch)(url).then((resp)=>createReadlineInterfaceFromResponse(resp, processLine));
}
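/**
 * Read a local file and collect every line that survives `processLine` into
 * an array.
 */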
async function readFileIntoProcessedArray(file/* | FileHandle */ ) {
const results = [];
for await (const line of readFileByLine(file)){
if ((0, _processline.processLine)(line)) {
results.push(line);
}
}
return results;
}
} (fetchTextByLine));
return fetchTextByLine;
}

// Minified lazy-accessor exports (same pattern as `fetchRetry.r()` used above):
// a = fetch-text-by-line, b = text-line-transform-stream, r = process-line.
exports.a = requireFetchTextByLine;
exports.b = requireTextLineTransformStream;
exports.r = requireProcessLine;
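
// Minimal usage sketch, inside an async context and assuming a Node.js version
// where web ReadableStream is async-iterable (the file path and URL below are
// illustrative, not part of this chunk):
//
//   const { readFileIntoProcessedArray, fetchRemoteTextByLine } =
//     require('./fetch-text-by-line.YY5b5610.cjs').a();
//
//   const localLines = await readFileIntoProcessedArray('./example-ruleset.conf');
//   for await (const line of await fetchRemoteTextByLine('https://example.com/filter.txt', true)) {
//     // each `line` has already been normalized by ProcessLineStream
//   }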