Mirror of https://github.com/SukkaW/Surge.git (synced 2026-01-28 17:41:54 +08:00)
Chore: simplify build infra / remove reject_phishing
@@ -10,12 +10,13 @@ import { createRuleset, compareAndWriteFile } from './lib/create-file';
 import { processLine } from './lib/process-line';
 import { domainDeduper } from './lib/domain-deduper';
 import createKeywordFilter from './lib/aho-corasick';
-import { readFileByLine } from './lib/fetch-remote-text-by-line';
+import { readFileByLine } from './lib/fetch-text-by-line';
 import { createDomainSorter } from './lib/stable-sort-domain';
-import { traceSync, task } from './lib/trace-runner';
+import { traceSync, task, traceAsync } from './lib/trace-runner';
 import { getGorhillPublicSuffixPromise } from './lib/get-gorhill-publicsuffix';
 import * as tldts from 'tldts';
 import { SHARED_DESCRIPTION } from './lib/constants';
+import { getPhishingDomains } from './lib/get-phishing-domains';
 
 /** Whitelists */
 const filterRuleWhitelistDomainSets = new Set(PREDEFINED_WHITELIST);
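
Note: `traceAsync` is imported above but its implementation is not part of this diff. A minimal sketch consistent with how it is called in the next hunk (a label plus an async thunk, resolving to the thunk's return value) could look like the following; the real code in `./lib/trace-runner` may differ.

// Hypothetical sketch of traceAsync, inferred from its call sites in this diff.
// The real implementation lives in ./lib/trace-runner and may differ.
export const traceAsync = async <T>(
  label: string,
  fn: () => Promise<T>
): Promise<T> => {
  const start = performance.now();
  try {
    return await fn();
  } finally {
    // Mirrors the console.time / console.timeEnd pair this commit removes.
    console.log(`${label}: ${(performance.now() - start).toFixed(0)}ms`);
  }
};

A wrapper like this is what lets the commit drop the paired console.time / console.timeEnd calls while still reporting a duration.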
@@ -29,67 +30,65 @@ export const buildRejectDomainSet = task(import.meta.path, async () => {
   const domainSets = new Set<string>();
 
   // Parse from AdGuard Filters
-  console.time('* Download and process Hosts / AdBlock Filter Rules');
-
-  let shouldStop = false;
-
-  const [gorhill] = await Promise.all([
-    getGorhillPublicSuffixPromise(),
-    // Parse from remote hosts & domain lists
-    ...HOSTS.map(entry => processHosts(entry[0], entry[1]).then(hosts => {
-      hosts.forEach(host => {
-        if (host) {
-          domainSets.add(host);
-        }
-      });
-    })),
-    ...ADGUARD_FILTERS.map(input => {
-      const promise = typeof input === 'string'
-        ? processFilterRules(input)
-        : processFilterRules(input[0], input[1]);
-
-      return promise.then((i) => {
-        if (i) {
-          const { white, black, foundDebugDomain } = i;
-          if (foundDebugDomain) {
-            shouldStop = true;
-            // we should not break here, as we want to see full matches from all data source
-          }
-          white.forEach(i => filterRuleWhitelistDomainSets.add(i));
-          black.forEach(i => domainSets.add(i));
-        } else {
-          process.exitCode = 1;
-          throw new Error('Failed to process AdGuard Filter Rules!');
-        }
-      });
-    }),
-    ...([
-      'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exceptions.txt',
-      'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exclusions.txt'
-    ].map(input => processFilterRules(input).then((i) => {
-      if (i) {
-        const { white, black } = i;
-        white.forEach(i => {
-          filterRuleWhitelistDomainSets.add(i);
-        });
-        black.forEach(i => {
-          filterRuleWhitelistDomainSets.add(i);
-        });
-      } else {
-        process.exitCode = 1;
-        throw new Error('Failed to process AdGuard Filter Rules!');
-      }
-    })))
-  ]);
-
-  // remove pre-defined enforced blacklist from whitelist
-  const trie0 = createTrie(filterRuleWhitelistDomainSets);
-  PREDEFINED_ENFORCED_BACKLIST.forEach(enforcedBlack => {
-    trie0.find(enforcedBlack).forEach(found => filterRuleWhitelistDomainSets.delete(found));
-  });
-
-  console.timeEnd('* Download and process Hosts / AdBlock Filter Rules');
+  const [gorhill, shouldStop] = await traceAsync('* Download and process Hosts / AdBlock Filter Rules', async () => {
+    let shouldStop = false;
+    const [gorhill] = await Promise.all([
+      getGorhillPublicSuffixPromise(),
+      // Parse from remote hosts & domain lists
+      ...HOSTS.map(entry => processHosts(entry[0], entry[1]).then(hosts => {
+        hosts.forEach(host => {
+          if (host) {
+            domainSets.add(host);
+          }
+        });
+      })),
+      ...ADGUARD_FILTERS.map(input => {
+        const promise = typeof input === 'string'
+          ? processFilterRules(input)
+          : processFilterRules(input[0], input[1] || undefined);
+
+        return promise.then(({ white, black, foundDebugDomain }) => {
+          if (foundDebugDomain) {
+            shouldStop = true;
+            // we should not break here, as we want to see full matches from all data source
+          }
+          white.forEach(i => filterRuleWhitelistDomainSets.add(i));
+          black.forEach(i => domainSets.add(i));
+        });
+      }),
+      ...([
+        'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exceptions.txt',
+        'https://raw.githubusercontent.com/AdguardTeam/AdGuardSDNSFilter/master/Filters/exclusions.txt'
+      ].map(input => processFilterRules(input).then(({ white, black }) => {
+        white.forEach(i => {
+          filterRuleWhitelistDomainSets.add(i);
+        });
+        black.forEach(i => {
+          filterRuleWhitelistDomainSets.add(i);
+        });
+      }))),
+      getPhishingDomains().then(([purePhishingDomains, fullDomainSet]) => {
+        fullDomainSet.forEach(host => {
+          if (host) {
+            domainSets.add(host);
+          }
+        });
+        purePhishingDomains.forEach(suffix => {
+          domainSets.add(`.${suffix}`);
+        });
+      })
+    ]);
+
+    // remove pre-defined enforced blacklist from whitelist
+    const trie0 = createTrie(filterRuleWhitelistDomainSets);
+    PREDEFINED_ENFORCED_BACKLIST.forEach(enforcedBlack => {
+      trie0.find(enforcedBlack).forEach(found => filterRuleWhitelistDomainSets.delete(found));
+    });
+
+    return [gorhill, shouldStop] as const;
+  });
 
   if (shouldStop) {
     process.exit(1);
   }
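
For context on the new `getPhishingDomains()` branch of the Promise.all: it resolves to a `[purePhishingDomains, fullDomainSet]` tuple, as destructured above, and each pure phishing domain is added with a leading dot. In Surge's domainset notation a leading dot covers the domain itself and every subdomain. An illustrative matcher for that convention (hypothetical helper, not repo code):

// Illustrative matcher for the domainset notation used above (not repo code):
// an entry ".example.com" covers example.com and every subdomain, while
// "example.com" covers only the exact domain.
const matchesEntry = (entry: string, domain: string): boolean => {
  if (entry[0] === '.') {
    const base = entry.slice(1);
    return domain === base || domain.endsWith(entry);
  }
  return domain === entry;
};

console.log(matchesEntry('.example.com', 'login.example.com')); // true
console.log(matchesEntry('.example.com', 'example.com'));       // true
console.log(matchesEntry('example.com', 'login.example.com'));  // false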
@@ -117,57 +116,46 @@ export const buildRejectDomainSet = task(import.meta.path, async () => {
     }
   }
 
-  for await (const line of readFileByLine(path.resolve(import.meta.dir, '../List/domainset/reject_phishing.conf'))) {
-    const l = processLine(line);
-    if (l?.[0] === '.') {
-      domainSuffixSet.add(l.slice(1));
-    }
-  }
-
   console.log(`Import ${domainKeywordsSet.size} black keywords and ${domainSuffixSet.size} black suffixes!`);
 
   previousSize = domainSets.size;
   // Dedupe domainSets
   console.log(`Start deduping from black keywords/suffixes! (${previousSize})`);
-  console.time('* Dedupe from black keywords/suffixes');
-
-  const trie1 = createTrie(domainSets);
-  domainSuffixSet.forEach(suffix => {
-    trie1.find(suffix, true).forEach(f => domainSets.delete(f));
-  });
-  filterRuleWhitelistDomainSets.forEach(suffix => {
-    trie1.find(suffix, true).forEach(f => domainSets.delete(f));
-  });
-
-  // remove pre-defined enforced blacklist from whitelist
-  const kwfilter = createKeywordFilter(domainKeywordsSet);
-
-  // Build whitelist trie, to handle case like removing `g.msn.com` due to white `.g.msn.com` (`@@||g.msn.com`)
-  const trieWhite = createTrie(filterRuleWhitelistDomainSets);
-  for (const domain of domainSets) {
-    if (domain[0] === '.') {
-      if (trieWhite.contains(domain)) {
-        domainSets.delete(domain);
-        continue;
-      }
-    } else if (trieWhite.has(`.${domain}`)) {
-      domainSets.delete(domain);
-      continue;
-    }
-
-    // Remove keyword
-    if (kwfilter.search(domain)) {
-      domainSets.delete(domain);
-    }
-  }
-
-  console.timeEnd('* Dedupe from black keywords/suffixes');
+  traceSync('* Dedupe from black keywords/suffixes', () => {
+    const trie1 = createTrie(domainSets);
+    domainSuffixSet.forEach(suffix => {
+      trie1.find(suffix, true).forEach(f => domainSets.delete(f));
+    });
+    filterRuleWhitelistDomainSets.forEach(suffix => {
+      trie1.find(suffix, true).forEach(f => domainSets.delete(f));
+    });
+
+    // remove pre-defined enforced blacklist from whitelist
+    const kwfilter = createKeywordFilter(domainKeywordsSet);
+
+    // Build whitelist trie, to handle case like removing `g.msn.com` due to white `.g.msn.com` (`@@||g.msn.com`)
+    const trieWhite = createTrie(filterRuleWhitelistDomainSets);
+    for (const domain of domainSets) {
+      if (domain[0] === '.') {
+        if (trieWhite.contains(domain)) {
+          domainSets.delete(domain);
+          continue;
+        }
+      } else if (trieWhite.has(`.${domain}`)) {
+        domainSets.delete(domain);
+        continue;
+      }
+
+      // Remove keyword
+      if (kwfilter.search(domain)) {
+        domainSets.delete(domain);
+      }
+    }
+  });
   console.log(`Deduped ${previousSize} - ${domainSets.size} = ${previousSize - domainSets.size} from black keywords and suffixes!`);
 
   previousSize = domainSets.size;
   // Dedupe domainSets
   console.log(`Start deduping! (${previousSize})`);
 
   const dudupedDominArray = traceSync('* Dedupe from covered subdomain', () => domainDeduper(Array.from(domainSets)));
   console.log(`Deduped ${previousSize - dudupedDominArray.length} rules!`);
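
In the dedupe loop above, `kwfilter` comes from `createKeywordFilter` (the `./lib/aho-corasick` import): an Aho-Corasick automaton matches every blacklisted keyword in a single pass over each domain instead of one substring scan per keyword. A naive behavioral stand-in, assuming `search` is truthy when any keyword occurs as a substring (not the repo's implementation):

// Behavioral stand-in for the Aho-Corasick keyword filter used above.
// The real ./lib/aho-corasick builds an automaton and scans the input once;
// this naive version scans once per keyword but returns the same answers.
const createNaiveKeywordFilter = (keywords: Set<string>) => {
  const kws = [...keywords];
  return {
    search: (text: string): boolean => kws.some(kw => text.includes(kw))
  };
};

const kwfilter = createNaiveKeywordFilter(new Set(['tracker', 'adserv']));
console.log(kwfilter.search('adserv.example.com')); // true
console.log(kwfilter.search('cdn.example.com'));    // false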
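
Finally, `domainDeduper` itself is unchanged by this commit; it is what the '* Dedupe from covered subdomain' trace wraps. Judging from that label, it drops entries already covered by a broader `.suffix` entry. A rough sketch of the idea under exactly that assumption (the real `./lib/domain-deduper` uses a more efficient algorithm):

// Minimal sketch of covered-subdomain dedupe (illustrative only; the real
// ./lib/domain-deduper is more efficient than this quadratic version).
const dedupeCovered = (domains: string[]): string[] => {
  // Collect every wildcard-style entry like ".example.com".
  const suffixes = domains.filter(d => d[0] === '.');
  return domains.filter(d => !suffixes.some(s =>
    // Drop "a.example.com" and "example.com" when ".example.com" is present,
    // but keep the ".example.com" entry itself.
    d !== s && (d.endsWith(s) || d === s.slice(1))
  ));
};

console.log(dedupeCovered(['.example.com', 'a.example.com', 'example.com', 'other.net']));
// => [ '.example.com', 'other.net' ]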