Compare commits

...

10 Commits

18 changed files with 1373 additions and 227 deletions


@@ -1,6 +1,6 @@
ISC License
Copyright (c) 2021, acidvegas <acid.vegas@acid.vegas>
Copyright (c) 2024, acidvegas <acid.vegas@acid.vegas>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above


@@ -8,5 +8,4 @@
___
###### Mirrors
[acid.vegas](https://git.acid.vegas/proxytools) • [GitHub](https://github.com/acidvegas/proxytools) • [GitLab](https://gitlab.com/acidvegas/proxytools) • [SuperNETs](https://git.supernets.org/acidvegas/proxytools)
###### Mirrors for this repository: [acid.vegas](https://git.acid.vegas/proxytools) • [SuperNETs](https://git.supernets.org/acidvegas/proxytools) • [GitHub](https://github.com/acidvegas/proxytools) • [GitLab](https://gitlab.com/acidvegas/proxytools) • [Codeberg](https://codeberg.org/acidvegas/proxytools)


@@ -1,80 +1,133 @@
#!/usr/bin/env python
# CleanSocks - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)
'''
This script will clean a list of proxies by removing duplicates, checking for valid formats (IP:PORT), & testing if the proxies are working
'''
# This script will clean a list of proxies by removing duplicates, checking for valid formats (IP:PORT), & testing if the proxies are working
# If a proxy is found working on multiple ports, we will only store the first working port to avoid ip duplication in the clean results.
import argparse
import asyncio
import os
import re
# Globals
all = list()
good = list()
print_bad = True
try:
import aiosocks
except ImportError:
raise SystemExit('missing pysocks module (https://pypi.org/project/aiosocks/)')
async def check(semaphore, proxy):
try:
import aiohttp
except ImportError:
raise SystemExit('missing required library \'aiohttp\' (https://pypi.org/project/aiohttp/)')
# Globals
all = list()
good = list()
async def check_http_proxy(semaphore: asyncio.Semaphore, proxy: str):
'''
Checks if a HTTP proxy is working.
:param semaphore: The semaphore to use.
:param proxy: The proxy to check.
'''
async with semaphore:
ip, port = proxy.split(':')
proxy_ip = proxy.split(':')[0]
async with aiohttp.ClientSession() as session:
try:
async with session.get('https://google.com', proxy=f'http://{proxy}', timeout=args.timeout) as response:
if response.status == 200:
print('\033[1;32mGOOD\033[0m \033[30m|\033[0m ' + proxy)
if proxy_ip not in all:
all.append(proxy_ip)
good.append(proxy)
else:
if args.bad:
print('\033[1;31mBAD\033[0m \033[30m|\033[0m ' + proxy)
except:
if args.bad:
print('\033[1;31mBAD\033[0m \033[30m|\033[0m ' + proxy)
async def check_socks_proxy(semaphore: asyncio.Semaphore, proxy: str):
'''
Checks if a SOCKS proxy is working.
:param semaphore: The semaphore to use.
:param proxy: The proxy to check.
'''
async with semaphore:
proxy_ip, proxy_port = proxy.split(':')
options = {
'proxy' : aiosocks.Socks5Addr(proxy.split(':')[0], int(proxy.split(':')[1])),
'proxy' : aiosocks.Socks5Addr(proxy_ip, int(proxy_port)),
'proxy_auth' : None,
'dst' : ('www.google.com',80),
'dst' : ('www.google.com', 80),
'limit' : 1024,
'ssl' : None,
'family' : 2
}
try:
await asyncio.wait_for(aiosocks.open_connection(**options), 15)
await asyncio.wait_for(aiosocks.open_connection(**options), args.timeout)
except:
if print_bad:
if args.bad:
print('\033[1;31mBAD\033[0m \033[30m|\033[0m ' + proxy)
else:
print('\033[1;32mGOOD\033[0m \033[30m|\033[0m ' + proxy)
if ip not in all:
all.append(ip)
if proxy_ip not in all:
all.append(proxy_ip)
good.append(proxy)
async def main(targets):
sema = asyncio.BoundedSemaphore(500)
'''
Starts the main event loop.
:param targets: The proxies to check.
'''
sema = asyncio.BoundedSemaphore(args.threads)
jobs = list()
for target in targets:
jobs.append(asyncio.ensure_future(check(sema, target)))
if args.socks:
jobs.append(asyncio.ensure_future(check_socks_proxy(sema, target)))
else:
jobs.append(asyncio.ensure_future(check_http_proxy(sema, target)))
await asyncio.gather(*jobs)
# Main
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('CleanSOCKS Proxy Cleaner'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]')
parser.add_argument('input', help='file to scan')
parser.add_argument('output', help='file to output')
parser.add_argument('-t', '--threads', help='number of threads (default: 100)', default=100, type=int)
parser.add_argument('-x', '--timeout', help='socket timeout seconds (default: 15)', default=15, type=int)
args = parser.parse_args()
try:
import aiosocks
except ImportError:
raise SystemExit('missing required library \'aiosocks\' (https://pypi.org/project/aiosocks/)')
if not os.path.isfile(args.input):
raise SystemExit('no such input file')
initial = len(open(args.input).readlines())
proxies = set([proxy for proxy in re.findall('[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+', open(args.input).read(), re.MULTILINE)])
if not proxies:
raise SystemExit('no proxies found from input file')
asyncio.run(main(proxies))
good.sort()
with open(args.output, 'w') as output_file:
output_file.write('\n'.join(good))
print('\033[34mTotal\033[0m : ' + format(len(proxies), ',d'))
print('\033[34mGood\033[0m : ' + format(len(good), ',d'))
print('\033[34mBad\033[0m : ' + format(len(proxies)-len(good), ',d'))
print('\033[34mDupe\033[0m : ' + format(initial-len(proxies), ',d'))
if __name__ == '__main__':
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('CleanSOCKS Proxy Cleaner'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]')
parser.add_argument('input', help='file to scan')
parser.add_argument('output', help='file to output')
parser.add_argument('-s', '--socks', action='store_true', help='Check SOCKS proxies (default: HTTP)')
parser.add_argument('-b', '--bad', action='store_true', help='Show bad proxies')
parser.add_argument('-t', '--threads', help='number of threads (default: 100)', default=100, type=int)
parser.add_argument('-x', '--timeout', help='socket timeout seconds (default: 15)', default=15, type=int)
args = parser.parse_args()
if not os.path.isfile(args.input):
raise SystemExit('no such input file')
initial = len(open(args.input).readlines())
proxies = set([proxy for proxy in re.findall(r'[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+', open(args.input).read(), re.MULTILINE)])
if not proxies:
raise SystemExit('no proxies found from input file')
print('\033[1;32mChecking {0:,} {1} proxies using {2:,} threads... \033[1;30m(Pass the -h or --help argument to change these settings)\033[0m'.format(len(proxies), 'SOCKS' if args.socks else 'HTTP', args.threads))
asyncio.run(main(proxies))
good.sort()
with open(args.output, 'w') as output_file:
output_file.write('\n'.join(good))
print('\033[34mTotal\033[0m : ' + format(len(proxies), ',d'))
print('\033[34mGood\033[0m : ' + format(len(good), ',d'))
print('\033[34mBad\033[0m : ' + format(len(proxies)-len(good), ',d'))
print('\033[34mDupe\033[0m : ' + format(initial-len(proxies), ',d'))
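Going by the argparse flags above, a typical run of the new script might look like this (file names are hypothetical):

```shell
python cleansocks.py proxies.txt clean.txt --socks --bad -t 200 -x 10
```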

dnsbl.py (Normal file, 617 lines)

@@ -0,0 +1,617 @@
#!/usr/bin/env python
# DNSBL - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)
import argparse
import asyncio
import ipaddress
import logging
import os
try:
import aiodns
except ImportError:
raise SystemExit('missing required library \'aiodns\' (https://pypi.org/project/aiodns/)')
# ANSI color codes
RED = '\033[91m'
GREEN = '\033[92m'
GREY = '\033[90m'
RESET = '\033[0m'
DNSBL_LIST = [
'0outspam.fusionzero.com',
'0spam-killlist.fusionzero.com',
'0spam.fusionzero.com',
'0spamtrust.fusionzero.com',
'0spamurl.fusionzero.com',
'3y.spam.mrs.kithrup.com',
'88.blocklist.zap',
'abuse-contacts.abusix.org',
'abuse.rfc-clueless.org',
'abuse.rfc-ignorant.org',
'access.atlbl.net',
'access.redhawk.org',
'accredit.habeas.com',
'admin.bl.kundenserver.de',
'all.ascc.dnsbl.bit.nl',
'all.dnsbl.bit.nl',
'all.rbl.jp',
'all.rbl.webiron.net',
'all.s5h.net',
'all.spam-rbl.fr',
'all.spamblock.unit.liu.se',
'all.spamrats.com',
'all.v6.ascc.dnsbl.bit.nl',
'apnic-main.bogons.dnsiplists.completewhois.com',
'arin-legacy-classb.bogons.dnsiplists.completewhois.com',
'arin-legacy-classc.bogons.dnsiplists.completewhois.com',
'arin-main.bogons.dnsiplists.completewhois.com',
'asiaspam.spamblocked.com',
'asn.routeviews.org',
'aspath.routeviews.org',
'aspews.dnsbl.sorbs.net',
'aspews.ext.sorbs.net',
'assholes.madscience.nl',
'auth.spamrats.com',
'autowork.drbl.ks.cz',
'b.barracudacentral.org',
'babl.rbl.webiron.net',
'backscatter.spameatingmonkey.net',
'bad.psky.me',
'badconf.rhsbl.sorbs.net',
'badnets.spameatingmonkey.net',
'ban.zebl.zoneedit.com',
'bandwidth-pigs.monkeys.com',
'bb.barracudacentral.org',
'bitonly.dnsbl.bit.nl',
'bl.blocklist.de',
'bl.blueshore.net',
'bl.borderworlds.dk',
'bl.deadbeef.com',
'bl.drmx.org',
'bl.emailbasura.org',
'bl.fmb.la',
'bl.ipv6.spameatingmonkey.net',
'bl.konstant.no',
'bl.mailspike.net',
'bl.mailspike.org',
'bl.mav.com.br',
'bl.mipspace.com',
'bl.nszones.com',
'bl.rbl-dns.com',
'bl.reynolds.net.au',
'bl.scientificspam.net',
'bl.score.senderscore.com',
'bl.shlink.org',
'bl.shlink.orgdul.ru',
'bl.spamcannibal.org',
'bl.spamcop.net',
'bl.spameatingmonkey.net',
'bl.spamstinks.com',
'bl.spamthwart.com',
'bl.student.pw.edu.pl',
'bl.suomispam.net',
'bl.tiopan.com',
'bl.tolkien.dk',
'black.junkemailfilter.com',
'blackhole.compu.net',
'blackholes.brainerd.net',
'blackholes.easynet.nl',
'blackholes.five-ten-sg.com',
'blackholes.mail-abuse.org',
'blackholes.sandes.dk',
'blacklist.fpsn.net',
'blacklist.hostkarma.com',
'blacklist.informationwave.net',
'blacklist.mail.ops.asp.att.net',
'blacklist.mailrelay.att.net',
'blacklist.sci.kun.nl',
'blacklist.sci.ru.nl',
'blacklist.sequoia.ops.asp.att.net',
'blacklist.woody.ch',
'block.ascams.com',
'block.blars.org',
'block.dnsbl.sorbs.net',
'blocked.asgardnet.org',
'blocked.hilli.dk',
'blocklist.squawk.com',
'blocklist2.squawk.com',
'bogon.lbl.lagengymnastik.dk',
'bogons.cymru.com',
'bogons.dnsiplists.completewhois.com',
'bogusmx.rfc-clueless.org',
'bogusmx.rfc-ignorant.org',
'bsb.empty.us',
'bsb.spamlookup.net',
'cabl.rbl.webiron.net',
'cart00ney.surriel.com',
'catchspam.com',
'cbl.abuseat.org',
'cbl.anti-spam.org.cn',
'cblless.anti-spam.org.cn',
'cblplus.anti-spam.org.cn',
'ccess.redhawk.org',
'cdl.anti-spam.org.cn',
'china.rominet.net',
'cidr.bl.mcafee.com',
'client-domain.sjesl.monkeys.com',
'cml.anti-spam.org.cn',
'combined-hib.dnsiplists.completewhois.com',
'combined.abuse.ch',
'combined.njabl.org',
'combined.rbl.msrbl.net',
'communicado.fmb.la',
'contacts.abuse.net',
'country-rirdata.dnsiplists.completewhois.com',
'crawler.rbl.webiron.net',
'csi.cloudmark.com',
'czdynamic.drbl.ks.cz',
'db.rurbl.ru',
'db.wpbl.info',
'dbl.spamhaus.org',
'dbl.suomispam.net',
'dev.null.dk',
'devnull.drbl.be.net.ru',
'dialup.blacklist.jippg.org',
'dialup.drbl.sandy.ru',
'dialups.mail-abuse.org',
'dialups.visi.com',
'dnsbl-0.uceprotect.net',
'dnsbl-1.uceprotect.net',
'dnsbl-2.uceprotect.net',
'dnsbl-3.uceprotect.net',
'dnsbl.abuse.ch',
'dnsbl.anticaptcha.net',
'dnsbl.antispam.or.id',
'dnsbl.aspnet.hu',
'dnsbl.calivent.com.pe',
'dnsbl.cbn.net.id',
'dnsbl.clue-by-4.org',
'dnsbl.cobion.com',
'dnsbl.cyberlogic.net',
'dnsbl.delink.net',
'dnsbl.dronebl.org',
'dnsbl.forefront.microsoft.com',
'dnsbl.httpbl.org',
'dnsbl.inps.de',
'dnsbl.ioerror.us',
'dnsbl.justspam.org',
'dnsbl.kempt.net',
'dnsbl.madavi.de',
'dnsbl.mags.net',
'dnsbl.mailshell.net',
'dnsbl.mcu.edu.tw',
'dnsbl.net.ua',
'dnsbl.njabl.org',
'dnsbl.pagedirect.net',
'dnsbl.proxybl.org',
'dnsbl.rangers.eu.org',
'dnsbl.rizon.net',
'dnsbl.rv-soft.info',
'dnsbl.rymsho.ru',
'dnsbl.sorbs.net',
'dnsbl.spam-champuru.livedoor.com',
'dnsbl.spfbl.net',
'dnsbl.technoirc.org',
'dnsbl.tornevall.org',
'dnsbl.webequipped.com',
'dnsbl.wpbl.pc9.org',
'dnsbl.zapbl.net',
'dnsbl6.anticaptcha.net',
'dnsblchile.org',
'dnsrbl.org',
'dnsrbl.swinog.ch',
'dnswl.inps.de',
'dnswl.leisi.net',
'dob.sibl.support-intelligence.net',
'drone.abuse.ch',
'dronebl.noderebellion.net',
'dsn.bl.rfc-ignorant.de',
'dsn.rfc-clueless.org',
'dsn.rfc-ignorant.org',
'dssl.imrss.org',
'duinv.aupads.org',
'dul.blackhole.cantv.net',
'dul.dnsbl.sorbs.net',
'dul.dnsbl.sorbs.netdul.ru',
'dul.orca.bc.ca',
'dul.pacifier.net',
'dul.ru',
'dyn.nszones.com',
'dyna.spamrats.com',
'dynablock.easynet.nl',
'dynablock.sorbs.net',
'dynamic.dnsbl.rangers.eu.org',
'dyndns.rbl.jp',
'dynip.rothen.com',
'elitist.rfc-clueless.org',
'endn.bl.reynolds.net.au',
'escalations.dnsbl.sorbs.net',
'eswlrev.dnsbl.rediris.es',
'eurospam.spamblocked.com',
'ex.dnsbl.org',
'exitnodes.tor.dnsbl.sectoor.de',
'exitnodes.tor.dnsbl.sectoor.dehttp.dnsbl.sorbs.net',
'feb.spamlab.com',
'fnrbl.fast.net',
'forbidden.icm.edu.pl',
'formmail.relays.monkeys.com',
'free.v4bl.org',
'fresh.dict.rbl.arix.com',
'fresh.sa_slip.rbl.arix.com',
'fresh.spameatingmonkey.net',
'fresh10.spameatingmonkey.net',
'fresh15.spameatingmonkey.net',
'fulldom.rfc-clueless.org',
'geobl.spameatingmonkey.net',
'gl.suomispam.net',
'hbl.atlbl.net',
'helo-domain.sjesl.monkeys.com',
'hijacked.dnsiplists.completewhois.com',
'hil.habeas.com',
'hong-kong.rominet.net',
'hostkarma.junkemailfilter.com',
'hostkarma.junkemailfilter.com[brl]',
'http.dnsbl.sorbs.net',
'httpbl.abuse.ch',
'hul.habeas.com',
'iadb.isipp.com',
'iadb2.isipp.com',
'iana-classa.bogons.dnsiplists.completewhois.com',
'iddb.isipp.com',
'images.rbl.msrbl.net',
'in.dnsbl.org',
'inputs.orbz.org',
'intercept.datapacket.net',
'intruders.docs.uu.se',
'invalidipwhois.dnsiplists.completewhois.com',
'ip.v4bl.org',
'ipbl.zeustracker.abuse.ch',
'ips.backscatterer.org',
'ips.whitelisted.org',
'ipv6.all.dnsbl.bit.nl',
'ipv6.all.s5h.net',
'ipwhois.rfc-ignorant.org',
'is-tor.kewlio.net.uk',
'ispmx.pofon.foobar.hu',
'isps.spamblocked.com',
'ix.dnsbl.manitu.net',
'korea.rominet.net',
'korea.services.net',
'ksi.dnsbl.net.au',
'l1.apews.org',
'l1.apews.rhsbl.sorbs.net',
'l1.bbfh.ext.sorbs.net',
'l1.spews.dnsbl.sorbs.net',
'l2.apews.dnsbl.sorbs.net',
'l2.bbfh.ext.sorbs.net',
'l2.spews.dnsbl.sorbs.net',
'l3.bbfh.ext.sorbs.net',
'l4.bbfh.ext.sorbs.net',
'lacnic-main.bogons.dnsiplists.completewhois.com',
'lacnic.spamblocked.com',
'lame.dnsbl.rangers.eu.org',
'lbl.lagengymnastik.dk',
'list.anonwhois.net',
'list.bbfh.org',
'list.blogspambl.com',
'list.dnswl.org',
'list.quorum.to',
'mail-abuse.blacklist.jippg.org',
'mail.people.it',
'manual.orbz.gst-group.co.uk',
'misc.dnsbl.sorbs.net',
'mr-out.imrss.org',
'msgid.bl.gweep.ca',
'mtawlrev.dnsbl.rediris.es',
'multi.surbl.org',
'multi.uribl.com',
'netbl.spameatingmonkey.net',
'netblock.pedantic.org',
'netblockbl.spamgrouper.com',
'netblockbl.spamgrouper.to',
'netscan.rbl.blockedservers.com',
'new.dnsbl.sorbs.net',
'new.spam.dnsbl.sorbs.net',
'nml.mail-abuse.org',
'no-more-funn.moensted.dk',
'nobl.junkemailfilter.com',
'nomail.rhsbl.sorbs.net',
'noptr.spamrats.com',
'noservers.dnsbl.sorbs.net',
'nospam.ant.pl',
'nsbl.fmb.la',
'old.dnsbl.sorbs.net',
'old.spam.dnsbl.sorbs.net',
'omrs.dnsbl.net.au',
'opm.tornevall.org',
'orbs.dorkslayers.com',
'orbz.gst-group.co.uk',
'origin.asn.cymru.com',
'origin.asn.spameatingmonkey.net',
'origin6.asn.cymru.com',
'orvedb.aupads.org',
'osps.dnsbl.net.au',
'osrs.dnsbl.net.au',
'outputs.orbz.org',
'owfs.dnsbl.net.au',
'pacbelldsl.compu.net',
'paidaccessviarsync',
'pbl.spamhaus.org',
'pdl.bl.reynolds.net.au',
'peer.asn.cymru.com',
'phishing.rbl.msrbl.net',
'plus.bondedsender.org',
'pm0-no-more.compu.net',
'pofon.foobar.hu',
'policy.lbl.lagengymnastik.dk',
'postmaster.rfc-clueless.org',
'postmaster.rfc-ignorant.org',
'ppbl.beat.st',
'probes.dnsbl.net.au',
'probes.dnsbl.net.auproxy.bl.gweep.ca',
'problems.dnsbl.sorbs.net',
'proxies.blackholes.easynet.nl',
'proxies.dnsbl.sorbs.net',
'proxies.exsilia.net',
'proxies.relays.monkeys.com',
'proxy.bl.gweep.ca',
'proxy.block.transip.nl',
'proxy.drbl.be.net.ru',
'psbl.surriel.com',
'pss.spambusters.org.ar',
'q.mail-abuse.com',
'query.bondedsender.org',
'query.senderbase.org',
'r.mail-abuse.com',
'rabl.nuclearelephant.com',
'random.bl.gweep.ca',
'rbl-plus.mail-abuse.org',
'rbl.abuse.ro',
'rbl.atlbl.net',
'rbl.blakjak.net',
'rbl.blockedservers.com',
'rbl.bulkfeeds.jp',
'rbl.cbn.net.id',
'rbl.choon.net',
'rbl.dns-servicios.com',
'rbl.echelon.pl',
'rbl.efnet.org',
'rbl.efnethelp.net',
'rbl.efnetrbl.org',
'rbl.eznettools.com',
'rbl.fasthosts.co.uk',
'rbl.firstbase.com',
'rbl.init1.nl',
'rbl.interserver.net',
'rbl.iprange.net',
'rbl.ipv6wl.eu',
'rbl.jp',
'rbl.lugh.ch',
'rbl.ma.krakow.pl',
'rbl.mail-abuse.org',
'rbl.megarbl.net',
'rbl.ntvinet.net',
'rbl.pil.dk',
'rbl.polarcomm.net',
'rbl.rope.net',
'rbl.schulte.org',
'rbl.snark.net',
'rbl.spamlab.com',
'rbl.suresupport.com',
'rbl.talkactive.net',
'rbl.triumf.ca',
'rbl2.triumf.ca',
'rdts.bl.reynolds.net.au',
'rdts.dnsbl.net.au',
'recent.dnsbl.sorbs.net',
'recent.spam.dnsbl.sorbs.net',
'relayips.rbl.shub-inter.net',
'relays.bl.gweep.ca',
'relays.bl.kundenserver.de',
'relays.dnsbl.sorbs.net',
'relays.dorkslayers.com',
'relays.mail-abuse.org',
'relays.nether.net',
'relays.radparker.com',
'relays.sandes.dk',
'relaywatcher.n13mbl.com',
'rep.mailspike.net',
'reputation-domain.rbl.scrolloutf1.com',
'reputation-ip.rbl.scrolloutf1.com',
'reputation-ns.rbl.scrolloutf1.com',
'residential.block.transip.nl',
'rf.senderbase.org',
'rhsbl.rymsho.ru',
'rhsbl.scientificspam.net',
'rhsbl.sorbs.net',
'rhsbl.zapbl.net',
'ricn.dnsbl.net.au',
'ripe-main.bogons.dnsiplists.completewhois.com',
'rmst.dnsbl.net.au',
'rot.blackhole.cantv.net',
'rsbl.aupads.org',
'rwl.choon.net',
'sa-accredit.habeas.com',
'sa.senderbase.org',
'safe.dnsbl.sorbs.net',
'sbl-xbl.spamhaus.org',
'sbl.nszones.com',
'sbl.spamhaus.org',
'schizo-bl.kundenserver.de',
'score.senderscore.com',
'sender-address.sjesl.monkeys.com',
'sender-domain-validate.sjesl.monkeys.com',
'sender-domain.sjesl.monkeys.com',
'service.mailwhitelist.com',
'short.fmb.la',
'short.rbl.jp',
'singlebl.spamgrouper.com',
'singular.ttk.pte.hu',
'smtp.dnsbl.sorbs.net',
'socks.dnsbl.sorbs.net',
'sohul.habeas.com',
'sorbs.dnsbl.net.au',
'spam.abuse.ch',
'spam.dnsbl.anonmails.de',
'spam.dnsbl.rangers.eu.org',
'spam.dnsbl.sorbs.net',
'spam.exsilia.net',
'spam.lbl.lagengymnastik.dk',
'spam.olsentech.net',
'spam.pedantic.org',
'spam.rbl.blockedservers.com',
'spam.rbl.msrbl.net',
'spam.shri.net',
'spam.spamrats.com',
'spam.wonk.org',
'spam.wytnij.to',
'spam.zapjunk.com',
'spamblock.kundenserver.de',
'spambot.bls.digibase.ca',
'spamdomain.block.transip.nl',
'spamdomains.blackholes.easynet.nl',
'spamguard.leadmon.net',
'spamips.rbl.shub-inter.net',
'spamlist.or.kr',
'spamrbl.imp.ch',
'spamsource.block.transip.nl',
'spamsources.fabel.dk',
'spamsources.spamblocked.com',
'spamsupport.dnsbl.rangers.eu.org',
'spbl.bl.winbots.org',
'spews.block.transip.nl',
'srn.surgate.net',
'srnblack.surgate.net',
'st.technovision.dk',
'stabl.rbl.webiron.net',
'stale.dict.rbl.arix.com',
'stale.sa_slip.arix.com',
'superblock.ascams.com',
'swl.spamhaus.org',
't3direct.dnsbl.net.au',
'taiwan.rominet.net',
'tor.dan.me.uk',
'tor.dnsbl.sectoor.de',
'tor.efnet.org',
'torexit.dan.me.uk',
'torserver.tor.dnsbl.sectoor.de',
'truncate.gbudb.net',
'trusted.nether.net',
'ubl.lashback.com',
'ubl.nszones.com',
'ubl.unsubscore.com',
'unsure.nether.net',
'uribl.abuse.ro',
'uribl.pofon.foobar.hu',
'uribl.spameatingmonkey.net',
'uribl.swinog.ch',
'uribl.zeustracker.abuse.ch',
'urired.spameatingmonkey.net',
'url.rbl.jp',
'v4.fullbogons.cymru.com',
'v6.fullbogons.cymru.com',
'vbl.mookystick.com',
'virbl.bit.nl',
'virbl.dnsbl.bit.nl',
'virus.rbl.jp',
'virus.rbl.msrbl.net',
'vote.drbl.be.net.ru',
'vote.drbl.caravan.ru',
'vote.drbl.croco.net',
'vote.drbl.dataforce.net',
'vote.drbl.gremlin.ru',
'vote.drbl.host.kz',
'vote.drbldf.dsbl.ru',
'vote.rbl.ntvinet.net',
'vouch.dwl.spamhaus.org',
'wadb.isipp.com',
'wbl.triumf.ca',
'wdl.bl.reynolds.net.au',
'web.dnsbl.sorbs.net',
'web.rbl.msrbl.net',
'whitelist.sci.kun.nl',
'whitelist.surriel.com',
'whois.rfc-clueless.org',
'whois.rfc-ignorant.org',
'wl.mailspike.net',
'wl.nszones.com',
'wl.shlink.org',
'wl.summersault.com',
'wl.trusted-forwarder.org',
'work.drbl.caravan.ru',
'work.drbl.croco.net',
'work.drbl.dataforce.net',
'work.drbl.gremlin.ru',
'work.drbl.host.kz',
'work.drbldf.dsbl.ru',
'worm.dnsbl.rangers.eu.org',
'wormrbl.imp.ch',
'worms-bl.kundenserver.de',
'wpb.bl.reynolds.net.au',
'xbl.selwerd.cx',
'xbl.spamhaus.org',
'ybl.megacity.org',
'z.mailspike.net',
'zebl.zoneedit.com',
'zen.spamhaus.org',
'zombie.dnsbl.sorbs.net',
'zta.birdsong.org',
'ztl.dorkslayers.com',
'zz.countries.nerd.dk'
]
async def check_dnsbl(ip: str, dnsbl: str, semaphore: asyncio.Semaphore):
'''
Check if an IP address is blacklisted on a DNSBL.
:param ip: IP address to check.
:param dnsbl: DNSBL to check.
:param semaphore: Semaphore to limit the number of concurrent requests.
'''
async with semaphore:
reversed_ip = '.'.join(reversed(ip.split('.')))
try:
resolver = aiodns.DNSResolver()
lookup = f'{reversed_ip}.{dnsbl}'
for item in await resolver.query(lookup, 'TXT'):
response = await resolver.query(lookup, 'A')
if response:
print(f'{GREEN}{ip} is blacklisted on {dnsbl}: {response[0].host}{RESET}')
else:
if args.verbose:
print(f'{RED}{ip} has no reply from {dnsbl}{RESET}')
except aiodns.error.DNSError as e:
if args.verbose:
if e.args[0] == 4:
print(f'{GREY}{ip} is not blacklisted on {dnsbl}{RESET}')
else:
print(f'{RED}{ip} errored on {dnsbl} with {lookup}: {e}{RESET}')
async def main(ip, concurrency):
semaphore = asyncio.Semaphore(concurrency)
tasks = [check_dnsbl(ip, dnsbl, semaphore) for dnsbl in DNSBL_LIST]
await asyncio.gather(*tasks)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='DNSBL Lookup Tool')
parser.add_argument('input', help='IP address or file with IP addresses')
parser.add_argument('-c', '--concurrency', type=int, default=50, help='Number of concurrent lookups')
parser.add_argument('-v', '--verbose', action='store_true', help='Enable verbose output')
args = parser.parse_args()
try:
ipaddress.ip_address(args.input)
asyncio.run(main(args.input, args.concurrency))
except ValueError:
if os.path.isfile(args.input):
with open(args.input, 'r') as file:
for line in file:
ip = line.strip()
try:
ipaddress.ip_address(ip)
asyncio.run(main(ip, args.concurrency))
except ValueError:
logging.warning(f'Invalid IP address: {ip}')
else:
raise SystemExit(f'Invalid IP address or file: {args.input}')
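The lookup convention dnsbl.py leans on: reverse the IPv4 octets, prepend them to the blacklist zone, and resolve the resulting name; an A record (conventionally in 127.0.0.0/8) means listed, NXDOMAIN means clean. A minimal synchronous sketch of the same check, assuming zen.spamhaus.org as the zone (note that some lists refuse queries from open resolvers):

```python
import socket

def dnsbl_listed(ip: str, zone: str = 'zen.spamhaus.org') -> bool:
    '''Blocking version of the reversed-octet lookup dnsbl.py performs with aiodns.'''
    query = '.'.join(reversed(ip.split('.'))) + '.' + zone
    try:
        socket.gethostbyname(query) # listed addresses resolve, typically to 127.0.0.x
        return True
    except socket.gaierror: # NXDOMAIN means not listed
        return False

print(dnsbl_listed('127.0.0.2')) # 127.0.0.2 is the conventional always-listed test address
```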

Binary file not shown.

dronebl/dronebl.py (Normal file, 78 lines)

@@ -0,0 +1,78 @@
#!/usr/bin/env python
# Copyright (c) 2008 DroneBL contributors
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the author nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class DroneBLClient:
"""Class for accessing DroneBL."""
def __init__(self, rpckey=None, server="https://dronebl.org/RPC2"):
self.server = server
self.rpckey = rpckey
self.submitList = ""
def addIP(self, ip, type):
"""Adds IP to DroneBL."""
self.submitList += "\t<add ip='" + ip + "' type='" + str(type) + "' />\n";
def lookupIP(self, ip):
"""Adds a lookup request to the message."""
self.submitList += "\t<lookup ip='" + ip + "' />\n"
def makeRequest(self):
"""Generates the request."""
self.request = "<?xml version=\"1.0\"?>\n<request key='" + self.rpckey + "'>\n" + self.submitList + "</request>"
def showRequest(self):
"""Shows the request."""
self.makeRequest()
print(self.request)
def makeConnection(self):
"""Connects to the RPC server."""
from urllib.parse import urlsplit # Python 3 replacement for urllib.splittype/splithost
import http.client # Python 3 replacement for httplib
parts = urlsplit(self.server)
self.__host, self.__handler = parts.netloc, parts.path
self.connection = http.client.HTTPSConnection(self.__host) if parts.scheme == 'https' else http.client.HTTPConnection(self.__host)
def postRequest(self):
"""Executes the request."""
self.makeRequest()
self.makeConnection()
self.connection.putrequest("POST", self.__handler)
self.connection.putheader("Content-Type", "text/xml")
self.connection.putheader("Content-Length", str(int(len(self.request))))
self.connection.endheaders()
self.connection.send(self.request.encode()) # http.client needs bytes
self.__response = self.connection.getresponse()
def printResponse(self):
"""Display the XML response."""
print(self.__response.read())

Binary file not shown.

dronebl/remove.sh (Normal file, 29 lines)

@@ -0,0 +1,29 @@
#!/bin/bash
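# Usage: ./remove.sh <IP> [IP ...]
# Looks up each IP on DroneBL, pulls the listing IDs out of the XML reply,
# then submits a <remove> request for every ID found. The RPC key is read
# from a dronebl.key file kept beside the script.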
cd "$(dirname "$0")"
KEY="$(<dronebl.key)"
(
echo "<?xml version=\"1.0\"?><request key=\"$KEY\">"
while [ -n "$1" ] ; do
echo "<lookup ip=\"$1\" />"
shift
done
echo "</request>"
) \
| curl -s --data @- https://dronebl.org/RPC2 \
| (xmllint --xpath '/response/result/@id' - 2>/dev/null | sed -n -e 's, id="\([^"]*\)",\1\n,gp') \
|(
echo "<?xml version=\"1.0\"?><request key=\"$KEY\">"
while read ID ; do
echo "Remove ID $ID" >&2
echo "<remove id=\"$ID\" />"
done
echo "</request>"
) \
| tee -a dronebl-remove.log \
| curl -s --data @- https://dronebl.org/RPC2 | tee -a dronebl-remove.log | grep -q "\"success\""
if [ $? -eq 0 ] ; then
echo "DRONEBL: successfully removed $@"
else
echo "DRONEBL: error removing $@"
fi

dronebl/submit.sh (Normal file, 175 lines)

@@ -0,0 +1,175 @@
#!/bin/bash
# syntax: dronebl-submit.sh [bantype [host|IP|datafile [host|IP|datafile [etc.]]]]
# where datafile contains one host or IP per line.
# This script will sort | uniq datafiles and query for existing active listings, so
# duplicate entries are no problem.
#
# dependencies: bash, wget, standard GNU utils (host / sed / grep / sort / etc)
#
# Version history:
# 2.1 -- fixed a logic error; removed the removal of /tmp/dronebl-*.xml files on error
# 2.0 -- completely rewritten for RPC2 (although argument syntax is backward-
# compatible)
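#
# Example (hypothetical addresses): report two SOCKS proxies (ban type 8) and
# the contents of a datafile in a single run:
#   ./dronebl-submit.sh 8 203.0.113.5 198.51.100.7 drones.txt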
RPCKEY="/etc/fail2ban/dronebl.rpckey" # key, or path to file containing rpckey
REPORT_TO="https://dronebl.org/RPC2"
### end of user variables ###
if [ ! -w "/tmp" ]; then
echo "Unable to write to /tmp. Please ensure the disk is not full, and that this account has appropriate permissions."
exit 1
fi
if [ -f "$RPCKEY" ]; then
if [ -r "$RPCKEY" ]; then
RPCKEY=`cat $RPCKEY`
else
echo "RPC key in $RPCKEY is unreadable. Exiting."
exit 1
fi
fi
function wash { # wash <hostname> -- sets $IP by reference
ADDR=$1
TEST=`echo "${ADDR}." | grep -E "^([0-9]{1,3}\.){4}$"`
if [ "$TEST" ]; then
VALID=0
else
VALID=1
fi
if [ "$VALID" = "1" ]; then
echo -n "Looking up $ADDR... "
ADDR=`host $ADDR | grep -E -o -e '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$'`
TEST=`echo "${ADDR}." | grep -E "^([0-9]{1,3}\.){4}$"`
if [ "$TEST" ]; then
echo "$ADDR"
else
echo "Unable to resolve. Skipping."
return 1
fi
fi
eval "IP=$ADDR"
return 0
}
function rinse { # rinse <bantype> -- if bantype is contained in the deprecated list, exit
DEPRECATED=( 4 11 12 )
for dep in ${DEPRECATED[@]}; do
if [ "$BANTYPE" == "$dep" ]; then
echo "Bantype $BANTYPE has been deprecated. The DroneBL is probably not the appropriate listing service for this sort of activity. Please visit us on irc.atheme.org in #dronebl if you believe otherwise. Exiting."
exit 1
fi
done
}
function checkerror { #checkerror <xmlfile> -- prints error messages from xml and exits
ERROR=`grep -i error $1`
if [ "$ERROR" ]; then
ERROR=`grep '<code>' $1 | sed -r -e 's/<[^>]*>//g' -e 's/^\s*//g'`
ERROR="$ERROR: `grep '<message>' $1 | sed -r -e 's/<[^>]*>//g' -e 's/^\s*//g'`"
echo "The server returned an error ($ERROR) -- see /tmp/dronebl-query.xml and /tmp/dronebl-response.xml for full details."
exit 1
fi
}
if [ "$2" = "" ]; then
echo -n 'Syntax:
'$0' [bantype [host|IP|datafile [host|IP|datafile [etc.]]]]
Types are as follows:
2 = Sample
3 = IRC Drone
4 = Tor exit node (deprecated)
5 = Bottler
6 = Unknown spambot or drone
7 = DDOS Drone
8 = SOCKS Proxy
9 = HTTP Proxy
10 = ProxyChain
11 = Machines and netblocks compromised or owned by MediaDefender (deprecated)
12 = Trolls (deprecated)
13 = Brute force attackers
14 = Open Wingate
15 = Open Router
255 = Unknown
Which type? '
read BANTYPE
rinse $BANTYPE
echo -n "What's the hostname / IP address? "
read ADDR
wash $ADDR
if [ $? -eq 0 ]; then
IPLIST[0]=$IP
else
echo "Unable to resolve $ADDR. Exiting."
exit 1
fi
else
rinse $1
args=($@)
echo "A little housekeeping..."
for (( x=1; x<${#args[@]}; x++ )); do
if [ "${args[$x]}" != "" ]; then
filename="${args[$x]}"
if [ ! -r "$filename" ]; then filename="$PWD/${args[$x]}"; fi
if [ -r "$filename" ]; then
for i in `sort -u $PWD/${args[$x]}`; do
wash $i
if [ $? -eq 0 ]; then IPLIST[${#IPLIST[@]}]=$IP; fi
done
else
wash ${args[$x]}
if [ $? -eq 0 ]; then IPLIST[${#IPLIST[@]}]=$IP; fi
fi
fi
done
IPLIST=( `for (( x=0; x<${#IPLIST[@]}; x++ )) ; do echo ${IPLIST[$x]}; done | sort -u` )
BANTYPE=$1
fi
POSTFILE="/tmp/dronebl-query.xml"
RESPONSEFILE="/tmp/dronebl-response.xml"
echo "Housekeeping finished. Working with ${#IPLIST[@]} unique, valid addresses."
if [ ${#IPLIST[@]} -eq 0 ]; then
echo "No hosts to report. Exiting."
exit 0
fi
echo "Checking for exiting entries... "
echo "<?xml version=\"1.0\"?>
<request key='"$RPCKEY"'>" >$POSTFILE
for i in ${IPLIST[@]}; do
echo " <lookup ip='$i' />" >>$POSTFILE
done
echo "</request>" >>$POSTFILE
wget -q --post-file="$POSTFILE" -O "$RESPONSEFILE" --header="Content-Type: text/xml" $REPORT_TO
checkerror $RESPONSEFILE
grepfor='type="'$BANTYPE'"'
for i in `grep 'listed="1"' $RESPONSEFILE | grep $grepfor | grep -E -o -e '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | sort -u`; do
IPLIST=( ${IPLIST[@]%%$i} )
echo "$i is already active in the DroneBL database as ban type $BANTYPE. Removing."
done
if [ ${#IPLIST[@]} -eq 0 ]; then
echo "No hosts to report. Exiting."
exit 0
elif [ ${#IPLIST[@]} -eq 1 ]; then
echo -n "Reporting ${IPLIST[@]} as ban type $BANTYPE... "
else
echo -n "Reporting ${#IPLIST[@]} hosts as ban type $BANTYPE... "
fi
echo "<?xml version=\"1.0\"?>
<request key='"$RPCKEY"'>" >$POSTFILE
for i in ${IPLIST[@]}; do
if [ "`echo ${i}. | grep -E '^([0-9]{1,3}\.){4}$'`" != "" ]; then echo " <add ip='$i' type='$BANTYPE' />" >>$POSTFILE; fi
done
echo "</request>" >>$POSTFILE
wget -q --post-file="$POSTFILE" -O "$RESPONSEFILE" --header="Content-Type: text/xml" $REPORT_TO
checkerror $RESPONSEFILE
echo "done."
rm -f /tmp/dronebl*.xml
exit 0


@@ -1,6 +1,9 @@
#!/usr/bin/env python
# FloodBL - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)
# This script will check a list of proxies against DNS Blackhole (DNSBL) lists to see if they are blackholed.
# Todo: Add support for asynchronous DNSBL lookups and proper IPv6 support.
import argparse
import concurrent.futures
import ipaddress
@@ -48,7 +51,12 @@ blackholes = {
}
}
def check(proxy):
def check(proxy: str):
'''
Check if a proxy is blackholed.
:param proxy: the proxy to check in the format of ip:port
'''
proxy_ip = proxy.split(':')[0]
formatted_ip = ipaddress.ip_address(proxy_ip).reverse_pointer
for blackhole in blackholes:
@@ -72,32 +80,41 @@ def check(proxy):
if proxy not in bad:
good.append(proxy)
# Main
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('FloodBL Blackhole Checker'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]')
parser.add_argument('input', help='file to scan')
parser.add_argument('output', help='file to output')
parser.add_argument('-t', '--threads', help='number of threads (default: 100)', default=100, type=int)
args = parser.parse_args()
if not os.path.isfile(args.input):
raise SystemExit('no such input file')
initial = len(open(args.input).readlines())
proxies = set([proxy.split(':')[0] for proxy in re.findall('[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+', open(args.input).read(), re.MULTILINE)]) # TODO: handle IPv6 better
if not proxies:
raise SystemExit('no proxies found from input file')
with concurrent.futures.ThreadPoolExecutor(max_workers=args.threads) as executor:
checks = {executor.submit(check, proxy): proxy for proxy in proxies}
for future in concurrent.futures.as_completed(checks):
checks[future]
good.sort()
with open(args.output, 'w') as output_file:
output_file.write('\n'.join(good))
print('\033[34mTotal\033[0m : ' + format(len(proxies), ',d'))
print('\033[34mGood\033[0m : ' + format(len(good), ',d'))
print('\033[34mBad\033[0m : ' + format(len(proxies)-len(good), ',d'))
if __name__ == '__main__':
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('FloodBL Blackhole Checker'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]')
parser.add_argument('input', help='file to scan')
parser.add_argument('output', help='file to output')
parser.add_argument('-t', '--threads', help='number of threads (default: 100)', default=100, type=int)
args = parser.parse_args()
if not os.path.isfile(args.input):
raise SystemExit('no such input file')
initial = len(open(args.input).readlines())
proxies = set([proxy.split(':')[0] for proxy in re.findall(r'[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+', open(args.input).read(), re.MULTILINE)]) # TODO: handle IPv6 better
if not proxies:
raise SystemExit('no proxies found from input file')
with concurrent.futures.ThreadPoolExecutor(max_workers=args.threads) as executor:
checks = {executor.submit(check, proxy): proxy for proxy in proxies}
for future in concurrent.futures.as_completed(checks):
future.result() # re-raise any exception from check() instead of silently dropping it
good.sort()
with open(args.output, 'w') as output_file:
output_file.write('\n'.join(good))
print('\033[34mTotal\033[0m : ' + format(len(proxies), ',d'))
print('\033[34mGood\033[0m : ' + format(len(good), ',d'))
print('\033[34mBad\033[0m : ' + format(len(proxies)-len(good), ',d'))
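With the flags defined above, a run over a freshly scraped list could look like this (file names are hypothetical):

```shell
python floodbl.py proxies.txt not_blackholed.txt -t 100
```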

proxy_sources.txt (Normal file, 57 lines)

@@ -0,0 +1,57 @@
https://api.openproxylist.xyz/socks4.txt
https://api.openproxylist.xyz/socks5.txt
https://api.proxyscrape.com/?request=displayproxies&proxytype=socks4
https://api.proxyscrape.com/v2/?request=displayproxies&protocol=socks4
https://api.proxyscrape.com/?request=displayproxies&proxytype=socks5
https://api.proxyscrape.com/v2/?request=displayproxies&protocol=socks5
https://proxy-list.download/api/v1/get?type=socks4
https://proxy-list.download/api/v1/get?type=socks5
https://proxyscan.io/download?type=socks4
https://proxyscan.io/download?type=socks5
https://proxyspace.pro/socks4.txt
https://proxyspace.pro/socks5.txt
https://raw.githubusercontent.com/ALIILAPRO/Proxy/main/socks4.txt
https://raw.githubusercontent.com/ALIILAPRO/Proxy/main/socks5.txt
https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/SOCKS4.txt
https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/SOCKS5.txt
https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt
https://raw.githubusercontent.com/HyperBeats/proxy-list/main/socks4.txt
https://raw.githubusercontent.com/HyperBeats/proxy-list/main/socks5.txt
https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-socks4.txt
https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-socks5.txt
https://raw.githubusercontent.com/manuGMG/proxy-365/main/SOCKS4.txt
https://raw.githubusercontent.com/manuGMG/proxy-365/main/SOCKS5.txt
https://raw.githubusercontent.com/mmpx12/proxy-list/master/socks4.txt
https://raw.githubusercontent.com/mmpx12/proxy-list/master/socks5.txt
https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/socks4.txt
https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/socks5.txt
https://raw.githubusercontent.com/monosans/proxy-list/main/proxies_anonymous/socks4.txt
https://raw.githubusercontent.com/monosans/proxy-list/main/proxies_anonymous/socks5.txt
https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks4.txt
https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks5.txt
https://raw.githubusercontent.com/officialputuid/KangProxy/KangProxy/socks5/socks4.txt
https://raw.githubusercontent.com/officialputuid/KangProxy/KangProxy/socks5/socks5.txt
https://raw.githubusercontent.com/prxchk/proxy-list/main/socks4.txt
https://raw.githubusercontent.com/prxchk/proxy-list/main/socks5.txt
https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies/socks4.txt
https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies/socks5.txt
https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies_anonymous/socks4.txt
https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies_anonymous/socks5.txt
https://raw.githubusercontent.com/roosterkid/openproxylist/main/SOCKS4_RAW.txt
https://raw.githubusercontent.com/roosterkid/openproxylist/main/SOCKS5_RAW.txt
https://raw.githubusercontent.com/RX4096/proxy-list/main/online/socks4.txt
https://raw.githubusercontent.com/RX4096/proxy-list/main/online/socks5.txt
https://raw.githubusercontent.com/saschazesiger/Free-Proxies/master/proxies/socks4.txt
https://raw.githubusercontent.com/saschazesiger/Free-Proxies/master/proxies/socks5.txt
https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks4.txt
https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks5.txt
https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt
https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks5.txt
https://raw.githubusercontent.com/UptimerBot/proxy-list/main/proxies/socks4.txt
https://raw.githubusercontent.com/UptimerBot/proxy-list/main/proxies/socks5.txt
https://raw.githubusercontent.com/Zaeem20/FREE_PROXIES_LIST/master/socks4.txt
https://raw.githubusercontent.com/Zaeem20/FREE_PROXIES_LIST/master/socks5.txt
https://raw.githubusercontent.com/zevtyardt/proxy-list/main/socks4.txt
https://raw.githubusercontent.com/zevtyardt/proxy-list/main/socks5.txt
https://spys.me/socks.txt
https://spys.one/en/socks-proxy-list/

pythonproxy.md (Normal file, 126 lines)

@@ -0,0 +1,126 @@
# Proxy usage with Python
## [aiosocks](https://pypi.org/project/aiosocks/)
```python
import asyncio
import ssl

import aiosocks

async def proxy_example(proxy: str, host: str, port: int, use_ssl: bool = False):
    '''Proxy can be in IP:PORT or USER:PASS@IP:PORT format; host/port is the destination to connect to'''
    auth = proxy.split('@')[0].split(':') if '@' in proxy else None
    proxy_ip, proxy_port = proxy.split('@')[1].split(':') if '@' in proxy else proxy.split(':')
    options = {
        'proxy'      : aiosocks.Socks5Addr(proxy_ip, int(proxy_port)),
        'proxy_auth' : aiosocks.Socks5Auth(*auth) if auth else None,
        'dst'        : (host, port),
        'limit'      : 1024,
        'ssl'        : ssl._create_unverified_context() if use_ssl else None,
        'family'     : 2 # 2 = IPv4 | 10 = IPv6
    }
    reader, writer = await asyncio.wait_for(aiosocks.open_connection(**options), 15) # 15 second timeout
    while True:
        data = await asyncio.wait_for(reader.readuntil(b'\r\n'), 300) # 5 minute timeout on no data received
        print(data.decode().strip()) # Print the response from the server
```
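A hypothetical call matching the signature above; the proxy address is a placeholder, and an IRC server is a natural destination since the line-based `readuntil` loop expects the server to talk first:

```python
import asyncio

# 127.0.0.1:1080 is a placeholder SOCKS5 proxy address.
asyncio.run(proxy_example('127.0.0.1:1080', 'irc.libera.chat', 6667))
```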
## [aiohttp](https://pypi.org/project/aiohttp)
```python
import asyncio
import aiohttp

async def proxy_example(proxy: str, url: str):
    '''Proxy can be in IP:PORT or USER:PASS@IP:PORT format'''
    async with aiohttp.ClientSession() as session:
        async with session.get(url, proxy=f'http://{proxy}', timeout=15) as response:
            if response.status == 200: # 200 = success
                print(await response.text()) # Print the response from the server
```
## [http.client](https://docs.python.org/3/library/http.client.html)
I really don't use this library much at all, so this is some LM generated function...
```python
import base64
import http.client

def proxy_example(proxy: str, url: str):
    '''Proxy can be in IP:PORT or USER:PASS@IP:PORT format'''
    auth = proxy.split('@')[0].split(':') if '@' in proxy else None
    proxy_host, proxy_port = (proxy.split('@')[1] if '@' in proxy else proxy).split(':')
    scheme, rest = url.split('://', 1)
    host, _, path = rest.partition('/')
    path = '/' + path
    proxy_headers = {}
    if auth:
        token = base64.b64encode(f'{auth[0]}:{auth[1]}'.encode()).decode()
        proxy_headers['Proxy-Authorization'] = f'Basic {token}'
    if scheme == 'https':
        conn = http.client.HTTPSConnection(proxy_host, int(proxy_port))
        conn.set_tunnel(host, headers=proxy_headers) # set_tunnel issues the CONNECT request for us
        conn.request('GET', path)
    else:
        conn = http.client.HTTPConnection(proxy_host, int(proxy_port))
        conn.request('GET', url, headers=proxy_headers) # plain HTTP proxies expect the absolute URL
    response = conn.getresponse()
    print(response.status, response.reason)
    if response.status == 200:
        print(response.read().decode())
    conn.close()
```
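Note the split above: for `https://` URLs the request is tunneled with CONNECT (`set_tunnel` issues it for us) and TLS runs end-to-end to the origin inside the tunnel, while plain `http://` requests go to the proxy with the absolute URL in the request line.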
## [requests](https://pypi.org/project/requests/)
```python
import requests

def proxy_example(proxy: str, url: str):
    '''Proxy can be in IP:PORT or USER:PASS@IP:PORT format'''
    proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} # the same HTTP proxy tunnels both schemes
    response = requests.get(url, proxies=proxies)
    print(response.text)
```
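requests can also speak SOCKS once the PySocks extra is installed (`pip install requests[socks]`); swap the scheme in the proxies dict, using `socks5h://` if you want DNS resolved through the proxy (placeholder address shown):

```python
proxies = {'http': 'socks5://127.0.0.1:1080', 'https': 'socks5://127.0.0.1:1080'}
```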
## [urllib.request](https://docs.python.org/3/library/urllib.html)
```python
import urllib.request

def proxy_example(proxy: str, url: str):
    '''Proxy can be in IP:PORT or USER:PASS@IP:PORT format'''
    address = proxy.split('@')[1] if '@' in proxy else proxy
    proxy_handler = urllib.request.ProxyHandler({'http': f'http://{address}', 'https': f'http://{address}'})
    opener = urllib.request.build_opener(proxy_handler)
    if '@' in proxy: # Handle authentication
        username, password = proxy.split('@')[0].split(':')
        password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None, f'http://{address}', username, password)
        opener.add_handler(urllib.request.ProxyBasicAuthHandler(password_mgr))
    urllib.request.install_opener(opener)
    response = urllib.request.urlopen(url, timeout=15)
    if response.code == 200:
        print(response.read().decode())
```


@@ -1,20 +0,0 @@
#!/bin/env bash
# shellscrape - developed by acidvegas (https://git.acid.vegas/proxytools)
URLS=(
"https://raw.githubusercontent.com/ALIILAPRO/Proxy/main/socks4.txt"
"https://raw.githubusercontent.com/ALIILAPRO/Proxy/main/socks5.txt"
"https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/SOCKS4.txt"
"https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/SOCKS5.txt"
"https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt"
"https://raw.githubusercontent.com/HyperBeats/proxy-list/main/socks4.txt"
"https://raw.githubusercontent.com/HyperBeats/proxy-list/main/socks5.txt"
)
>proxies.txt # truncate any previous results
for URL in "${URLS[@]}"; do
echo "Downloading from $URL"
curl -s $URL >> proxies.txt &
done
wait # let the background downloads finish before deduplicating
sort -u -o proxies.txt proxies.txt
echo "done"

shellsocked (Executable file, 34 lines)

@@ -0,0 +1,34 @@
#!/bin/env bash
# shellsocked - developed by acidvegas (https://git.acid.vegas/proxytools)
# Probably the most basic proxy scraper ever made: plain shell and curl, no other dependencies, no bullshit.
# Duplicate proxies are removed and the output is sorted and saved to a file.
# Feed it a single URL or a file with a list of URLs to scrape.
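# Usage:
#   ./shellsocked proxy_sources.txt            (scrape every URL listed in the file)
#   ./shellsocked https://spys.me/socks.txt    (scrape a single URL)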
scrape_url() {
local url="$1"
local proxies=$(curl -s -A "ShellSocked/1.0" "$url" | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+' | awk '!seen[$0]++')
local count=$(echo "$proxies" | wc -l)
PROXIES="${PROXIES}${proxies}"
echo -e "Found \033[32m${count}\033[0m proxies on \033[33m${url}\033[0m"
}
if [ -n "$1" ]; then
PROXIES=""
if [ -f "$1" ]; then
while IFS= read -r url; do
scrape_url "$url"
done < "$1"
else
scrape_url "$1"
fi
else
echo "Usage: $0 <input_file | single_url>"
exit 1
fi
PROXIES=$(printf "%s\n" "$PROXIES" | sort -u)
printf "%s\n" "$PROXIES" > proxies.txt
total_count=$(echo "$PROXIES" | wc -l)
echo "Grand Total: ${total_count} proxies"


@@ -1,111 +1,72 @@
#!/usr/bin/env python
# SockHub Proxy Scraper - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)
'''
There is a file in this repository called proxy_sources.txt which contains a list of URLs to scrape for proxies.
This list is not maintained and may contain dead links or links to sites that no longer serve proxies.
'''
import concurrent.futures
import logging
import os
import re
import urllib.request
# Can be any URL containing a list of IP:PORT proxies (does not have to be socks5)
# The current list contains proxy sources that are updated frequently with new proxies
# Almost all of the Github repos pull from the same place & contain duplicates (which are removed)
urls = set((
'https://api.openproxylist.xyz/socks4.txt',
'https://api.openproxylist.xyz/socks5.txt',
'https://api.proxyscrape.com/?request=displayproxies&proxytype=socks4',
'https://api.proxyscrape.com/v2/?request=displayproxies&protocol=socks4',
'https://api.proxyscrape.com/?request=displayproxies&proxytype=socks5',
'https://api.proxyscrape.com/v2/?request=displayproxies&protocol=socks5',
'https://proxy-list.download/api/v1/get?type=socks4',
'https://proxy-list.download/api/v1/get?type=socks5',
'https://proxyscan.io/download?type=socks4',
'https://proxyscan.io/download?type=socks5',
'https://proxyspace.pro/socks4.txt',
'https://proxyspace.pro/socks5.txt',
'https://raw.githubusercontent.com/ALIILAPRO/Proxy/main/socks4.txt',
'https://raw.githubusercontent.com/ALIILAPRO/Proxy/main/socks5.txt',
'https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/SOCKS4.txt',
'https://raw.githubusercontent.com/B4RC0DE-TM/proxy-list/main/SOCKS5.txt',
'https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt',
'https://raw.githubusercontent.com/HyperBeats/proxy-list/main/socks4.txt',
'https://raw.githubusercontent.com/HyperBeats/proxy-list/main/socks5.txt',
'https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-socks4.txt',
'https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-socks5.txt',
'https://raw.githubusercontent.com/manuGMG/proxy-365/main/SOCKS4.txt',
'https://raw.githubusercontent.com/manuGMG/proxy-365/main/SOCKS5.txt',
'https://raw.githubusercontent.com/mmpx12/proxy-list/master/socks4.txt',
'https://raw.githubusercontent.com/mmpx12/proxy-list/master/socks5.txt',
'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/socks4.txt',
'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/socks5.txt',
'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies_anonymous/socks4.txt',
'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies_anonymous/socks5.txt',
'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks4.txt',
'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks5.txt',
'https://raw.githubusercontent.com/officialputuid/KangProxy/KangProxy/socks5/socks4.txt',
'https://raw.githubusercontent.com/officialputuid/KangProxy/KangProxy/socks5/socks5.txt',
'https://raw.githubusercontent.com/prxchk/proxy-list/main/socks4.txt',
'https://raw.githubusercontent.com/prxchk/proxy-list/main/socks5.txt',
'https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies/socks4.txt',
'https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies/socks5.txt',
'https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies_anonymous/socks4.txt',
'https://raw.githubusercontent.com/rdavydov/proxy-list/main/proxies_anonymous/socks5.txt',
'https://raw.githubusercontent.com/roosterkid/openproxylist/main/SOCKS4_RAW.txt',
'https://raw.githubusercontent.com/roosterkid/openproxylist/main/SOCKS5_RAW.txt',
'https://raw.githubusercontent.com/RX4096/proxy-list/main/online/socks4.txt',
'https://raw.githubusercontent.com/RX4096/proxy-list/main/online/socks5.txt',
'https://raw.githubusercontent.com/saschazesiger/Free-Proxies/master/proxies/socks4.txt',
'https://raw.githubusercontent.com/saschazesiger/Free-Proxies/master/proxies/socks5.txt',
'https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks4.txt',
'https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks5.txt',
'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt',
'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks5.txt',
'https://raw.githubusercontent.com/UptimerBot/proxy-list/main/proxies/socks4.txt',
'https://raw.githubusercontent.com/UptimerBot/proxy-list/main/proxies/socks5.txt',
'https://raw.githubusercontent.com/Zaeem20/FREE_PROXIES_LIST/master/socks4.txt',
'https://raw.githubusercontent.com/Zaeem20/FREE_PROXIES_LIST/master/socks5.txt',
'https://raw.githubusercontent.com/zevtyardt/proxy-list/main/socks4.txt',
'https://raw.githubusercontent.com/zevtyardt/proxy-list/main/socks5.txt',
'https://spys.me/socks.txt',
'https://spys.one/en/socks-proxy-list/'
))
def get_source(url: str) -> str:
''' Get the source of a URL using a Googlebot user-agent. '''
req = urllib.request.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)')
source = urllib.request.urlopen(req, timeout=15)
return source.read().decode()
# Main
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('SockHub Proxy Scraper'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
total = 0
# Global
proxies = list()
proxy_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'proxies.txt')
print('scanning \033[35m{0:,}\033[0m urls from list...'.format(len(urls)))
for url in urls: # TODO: Maybe add concurrent.futures support for using larger lists
def find_proxies(url: str) -> str:
'''
Check a URL for IP:PORT proxies.
:param url: The URL to check for proxies.
'''
try:
source = get_source(url)
except:
print('found \033[31m0\033[0m new proxies on \033[34m{0}\033[0m \033[30m(failed to load)\033[0m'.format(url))
else:
total+= len(source.split())
found = set([proxy for proxy in re.findall('[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+', source, re.MULTILINE) if proxy not in proxies])
if found:
proxies += found
print('found \033[32m{0:,}\033[0m new proxies on \033[34m{1}\033[0m'.format(len(found), url))
source = urllib.request.urlopen(urllib.request.Request(url, headers={'User-Agent': 'SockHub/1.0'}), timeout=15).read().decode()
if source:
found = set(re.findall(r'[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+', source, re.MULTILINE))
if (new_proxies := [proxy for proxy in found if proxy not in proxies]):
proxies += new_proxies
print(f'found \033[32m{len(found):,}\033[0m new proxies on \033[34m{url}\033[0m')
else:
print('found \033[31m0\033[0m new proxies on \033[34m{0}\033[0m \033[30m(duplicates)\033[0m'.format(url))
if proxies:
if len(proxies) < total:
print('found \033[32m{0:,}\033[0m total proxies! \033[30m({1:,} duplicates removed)\033[0m'.format(len(proxies), total-len(proxies)))
logging.warning(f'found \033[31m0\033[0m new proxies on \033[34m{url}\033[0m \033[30m(source is empty)\033[0m')
except Exception as ex:
logging.error(f'found \033[31m0\033[0m new proxies on \033[34m{url}\033[0m \033[30m({ex})\033[0m')
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='SockHub Proxy Scraper - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)')
parser.add_argument('-i', '--input', help='input file containing a list of URLs to scrape (one per line) or a single URL')
parser.add_argument('-o', '--output', help='output file to save proxies to', default='proxies.txt')
parser.add_argument('-c', '--concurrency', help='number of concurrent threads to use (default: 10)', default=10, type=int)
args = parser.parse_args()
logging.basicConfig(format='%(message)s', level=logging.INFO)
if not os.path.isfile(args.input):
if args.input.startswith('https://') or args.input.startswith('http://'):
logging.info('using input as a single url...')
proxy_sources = [args.input]
else:
raise SystemExit('input file does not exist!')
proxy_sources = open(args.input, 'r').read().split('\n')
if not proxy_sources:
raise SystemExit('proxy sources input file is empty!')
logging.debug(f'scanning \033[35m{len(proxy_sources):,}\033[0m urls from list...')
with concurrent.futures.ThreadPoolExecutor(max_workers=args.concurrency) as executor:
futures = [executor.submit(find_proxies, url) for url in proxy_sources]
concurrent.futures.wait(futures)
if proxies:
logging.info(f'found \033[32m{len(proxies):,}\033[0m total proxies!')
proxies.sort()
with open(args.output, 'w') as output_file:
for proxy in proxies:
output_file.write(proxy + '\n')
else:
print('found \033[32m{0:,}\033[0m total proxies!'.format(len(proxies)))
proxies.sort()
with open (proxy_file, 'w') as proxy__file:
for proxy in proxies:
proxy__file.write(proxy + '\n')
logging.warning('no proxies found!')
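Given the new argparse interface, a typical invocation (file names are hypothetical; proxies.txt is the default output) would be:

```shell
python sockhub.py -i proxy_sources.txt -o proxies.txt -c 20
```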


@@ -1,7 +1,11 @@
#!/usr/bin/env python
# Tor Glass - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)
import json
'''
A simple script to pull a list of all the Tor relays / exit nodes & generate a json database.
The example below will generate a map of all the Tor relays / exit nodes using the ipinfo.io API.
'''
try:
import stem.descriptor.remote
@@ -52,25 +56,31 @@ def get_descriptors() -> dict:
tor_map['relay'].append(data)
return tor_map
if __name__ == '__main__':
import json
print('loading Tor descriptors... (this could take a while)')
tor_data = get_descriptors()
with open('tor.json', 'w') as fd:
json.dump(tor_data['relay'], fd)
with open('tor.exit.json', 'w') as fd:
json.dump(tor_data['exit'], fd)
print('Relays: {0:,}'.format(len(tor_data['relay'])))
print('Exits : {0:,}'.format(len(tor_data['exit'])))
try:
import ipinfo
except ImportError:
print('missing optional library \'ipinfo\' (https://pypi.org/project/ipinfo/) for map visualization')
else:
try:
handler = ipinfo.getHandler('changeme') # put your ipinfo.io API key here
print('Relay Map: ' + handler.getMap([ip['address'] for ip in tor_data['relay']]))
print('Exit Map: ' + handler.getMap([ip['address'] for ip in tor_data['exit']]))
except ipinfo.errors.AuthorizationError:
print('error: invalid ipinfo.io API key (https://ipinfo.io/signup)')
except Exception as ex:
print(f'error generating ipinfo map ({ex})')
raise ImportError('missing optional library \'ipinfo\' (https://pypi.org/project/ipinfo/) for map visualization')
try:
handler = ipinfo.getHandler('changeme') # put your ipinfo.io API key here
print('Relay Map: ' + handler.getMap([ip['address'] for ip in tor_data['relay']]))
print('Exit Map: ' + handler.getMap([ip['address'] for ip in tor_data['exit']]))
except ipinfo.errors.AuthorizationError:
print('error: invalid ipinfo.io API key (https://ipinfo.io/signup)')
except Exception as ex:
print(f'error generating ipinfo map ({ex})')


@@ -19,8 +19,12 @@ EXIT_FINGERPRINT = '379FB450010D17078B3766C2273303C358C3A442' # https://metrics.
SOCKS_PORT = 9050
CONNECTION_TIMEOUT = 30 # timeout before we give up on a circuit
def query(url):
''' Uses pycurl to fetch a site using the proxy on the SOCKS_PORT. '''
def query(url: str):
'''
Uses pycurl to fetch a site using the proxy on the SOCKS_PORT.
:param url: the url to fetch
'''
output = io.BytesIO() # pycurl's WRITEFUNCTION hands us bytes, so buffer with BytesIO
query = pycurl.Curl()
query.setopt(pycurl.URL, url)
@@ -36,7 +40,12 @@ def query(url):
raise ValueError("Unable to reach %s (%s)" % (url, exc))
def scan(controller, path):
''' Test the connection to a website through the given path of relays using the given controller '''
'''
Test the connection to a website through the given path of relays using the given controller.
:param controller: the controller to use
:param path: a list of fingerprints, in order, to build a path through
'''
circuit_id = controller.new_circuit(path, await_build = True)
def attach_stream(stream):
if stream.status == 'NEW':
@@ -54,12 +63,13 @@ def scan(controller, path):
controller.reset_conf('__LeaveStreamsUnattached')
# Main
with stem.control.Controller.from_port(port=9056) as controller:
controller.authenticate('loldongs')
relay_fingerprints = [desc.fingerprint for desc in controller.get_network_statuses()]
for fingerprint in relay_fingerprints:
try:
time_taken = scan(controller, [fingerprint, EXIT_FINGERPRINT])
print('%s => %0.2f seconds' % (fingerprint, time_taken))
except Exception as exc:
print('%s => %s' % (fingerprint, exc))
if __name__ == '__main__':
with stem.control.Controller.from_port(port=9056) as controller:
controller.authenticate('CHANGEME') # Change this to your Tor control password
relay_fingerprints = [desc.fingerprint for desc in controller.get_network_statuses()]
for fingerprint in relay_fingerprints:
try:
time_taken = scan(controller, [fingerprint, EXIT_FINGERPRINT])
print('%s => %0.2f seconds' % (fingerprint, time_taken))
except Exception as exc:
print('%s => %s' % (fingerprint, exc))