This commit is contained in:
SukkaW
2023-06-29 14:36:15 +08:00
parent 5d86ba3d2d
commit 1a7638b1ec
8 changed files with 152 additions and 93 deletions

View File

@@ -0,0 +1,57 @@
// @ts-check
const fs = require('fs');
const fse = require('fs-extra');
const path = require('path');
const readline = require('readline');
const { isDomainLoose } = require('./lib/is-domain-loose');

/**
 * Create a line iterator over a file.
 * `crlfDelay: Infinity` makes readline treat `\r\n` as a single line break.
 * @param {string} file - absolute path to the file to read
 * @returns {import('readline').Interface}
 */
const readFileByLine = (file) => readline.createInterface({
  input: fs.createReadStream(file),
  crlfDelay: Infinity
});

/**
 * Collect Surge CDN rules into a flat `SUFFIX,<domain>` CSV at
 * List/internal/cdn.csv.
 */
(async () => {
  /** @type {string[]} */
  const results = [];

  // Ruleset format: "DOMAIN-SUFFIX,example.com" / "DOMAIN,example.com".
  // The DOMAIN-SUFFIX check must come first: "DOMAIN," is a prefix of it.
  for await (const line of readFileByLine(path.resolve(__dirname, '../List/non_ip/cdn.conf'))) {
    if (line.startsWith('DOMAIN-SUFFIX,')) {
      results.push(line.replace('DOMAIN-SUFFIX,', 'SUFFIX,'));
    } else if (line.startsWith('DOMAIN,')) {
      results.push(line.replace('DOMAIN,', 'SUFFIX,'));
    }
  }

  /**
   * Domainset format: a leading "." marks a suffix rule, a bare line is a
   * plain domain; anything else (comments, blanks) is dropped by the
   * `isDomainLoose` check.
   * @param {string} file - absolute path to a domainset file
   */
  const processDomainSet = async (file) => {
    for await (const line of readFileByLine(file)) {
      if (line[0] === '.') {
        results.push(`SUFFIX,${line.slice(1)}`);
      } else if (isDomainLoose(line)) {
        results.push(`SUFFIX,${line}`);
      }
    }
  };

  await processDomainSet(path.resolve(__dirname, '../List/domainset/cdn.conf'));
  await processDomainSet(path.resolve(__dirname, '../List/domainset/download.conf'));

  // Trailing empty entry so the joined output ends with a newline.
  results.push('');

  await fse.ensureDir(path.resolve(__dirname, '../List/internal'));
  await fs.promises.writeFile(
    path.resolve(__dirname, '../List/internal/cdn.csv'),
    results.join('\n')
  );
})();

View File

@@ -14,46 +14,45 @@ const fileExists = (path) => {
};
/**
 * Restore the previous build output if any tracked List/ artifact is
 * missing locally, by downloading the gh-pages tarball and copying the
 * needed files back into place.
 *
 * NOTE(review): the diff rendering had interleaved the pre/post-commit
 * lines; this is the reconstructed post-commit (early-return) version
 * with the doubled download body deduplicated.
 */
(async () => {
  // Entries under List/ listed in .gitignore are build artifacts; skip
  // directory entries (trailing "/") — only files are checked/copied.
  const filesList = (await fs.promises.readFile(resolve(__dirname, '../.gitignore'), { encoding: 'utf-8' }))
    .split('\n')
    .filter(p => p.startsWith('List/') && !p.endsWith('/'));

  // Guard clause: if no file is missing, there is nothing to download.
  if (
    !((await Promise.all(
      filesList.map(p => fileExists(join(__dirname, '..', p)))
    )).some(exist => !exist))
  ) {
    console.log('All files exists, skip download.');
    return;
  }

  // Stream the gh-pages tarball to a temp file.
  const tempFile = join(tmpdir(), `sukka-surge-last-build-tar-${Date.now()}`);
  const resp = await fetch('https://codeload.github.com/sukkaw/surge/tar.gz/gh-pages');
  const readableNodeStream = Readable.fromWeb(resp.body);
  await pipeline(
    readableNodeStream,
    fs.createWriteStream(tempFile)
  );

  // Extract only the List/ subtree (path component after the
  // "Surge-gh-pages/" archive root).
  const extractedPath = join(tmpdir(), `sukka-surge-last-build-extracted-${Date.now()}`);
  await fse.ensureDir(extractedPath);
  await tar.x({
    file: tempFile,
    cwd: extractedPath,
    filter: (p) => {
      return p.split('/')[1] === 'List';
    }
  });

  await Promise.all(filesList.map(p => fse.copy(
    join(extractedPath, 'Surge-gh-pages', p),
    join(__dirname, '..', p),
    {
      overwrite: true
    }
  )));

  // Best-effort cleanup. unlink() cannot remove a directory (it would
  // always fail with EISDIR/EPERM and be silently swallowed), so the
  // extracted tree needs a recursive rm instead.
  await fs.promises.unlink(tempFile).catch(() => { });
  await fs.promises.rm(extractedPath, { recursive: true, force: true }).catch(() => { });
})();

View File

@@ -81,9 +81,7 @@ const validateRuleset = async (filePath) => {
]);
await Promise.all(
domainsetFiles.map(file => validateDomainSet(file))
// rulesetFiles.map(file => validateRuleset(file))
);
// await Promise.all(
// rulesetFiles.map(file => validateRuleset(file))
// );
})();