Updated proxytools suite with general code improvements & cleanup

This commit is contained in:
Dionysus 2023-06-10 01:03:46 -04:00
parent f04cd03d67
commit 2e8b3fbfba
Signed by: acidvegas
GPG Key ID: EF4B922DB85DC9DE
5 changed files with 96 additions and 7 deletions

View File

@@ -1,10 +1,6 @@
# proxytools
> collection of scripts for harvesting & testing proxies
## Requirements
- [Python](https://www.python.org/downloads/) *(**Note:** These scripts were developed to be used with the latest version of Python)*
- [PySocks](https://pypi.python.org/pypi/PySocks) *(Required for [cleansocks.py](proxytools/cleansocks.py))*
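
Since PySocks is only named here as a dependency, the following is a minimal sketch of the kind of check it enables for cleansocks.py, assuming a plain TCP connect through the proxy; the target host, port, and timeout are illustrative, not taken from the script:

```python
import socks  # provided by the PySocks package

def is_alive(proxy, timeout=10):
    '''Return True if a TCP connection through the SOCKS5 proxy succeeds.'''
    ip, port = proxy.split(':')
    sock = socks.socksocket()
    sock.set_proxy(socks.SOCKS5, ip, int(port))
    sock.settimeout(timeout)
    try:
        sock.connect(('www.google.com', 80))  # illustrative target, not from cleansocks.py
        return True
    except Exception:
        return False
    finally:
        sock.close()

print(is_alive('127.0.0.1:1080'))  # hypothetical proxy address
```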
## Mirrors
- [acid.vegas](https://git.acid.vegas/proxytools)
- [GitHub](https://github.com/acidvegas/proxytools)

View File

@@ -31,6 +31,13 @@ def check(proxy):
good.append(proxy)
# Main
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('CleanSOCKS Proxy Cleaner'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]')
parser.add_argument('input', help='file to scan')
parser.add_argument('output', help='file to output')
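
For context on how these two positional arguments might be consumed, here is a hedged sketch of a cleaning pass: read the input list, de-duplicate it, run a check over each proxy, and write the survivors to the output file. The placeholder check() and the thread count are assumptions, not code from cleansocks.py:

```python
import argparse
import concurrent.futures

def check(proxy):
    '''Stand-in for the real check() shown in the hunk above.'''
    return True  # assume every proxy passes for this sketch

parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]')
parser.add_argument('input', help='file to scan')
parser.add_argument('output', help='file to output')
args = parser.parse_args()

with open(args.input) as fp:
    proxies = sorted({line.strip() for line in fp if line.strip()})  # de-duplicate

with concurrent.futures.ThreadPoolExecutor(max_workers=100) as pool:  # thread count is illustrative
    results = list(pool.map(check, proxies))

good = [proxy for proxy, ok in zip(proxies, results) if ok]
with open(args.output, 'w') as fp:
    fp.writelines(proxy + '\n' for proxy in good)
```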

View File

@@ -20,18 +20,25 @@ print_bad = True
def check(proxy):
formatted_ip = '.'.join(proxy.split('.')[::-1])
for dnsbl in blackholes:
for blackhole in blackholes:
try:
socket.gethostbyname(f'{formatted_ip}.{dnsbl}')
socket.gethostbyname(f'{formatted_ip}.{blackhole}')
except socket.gaierror:
if print_bad:
print('\033[1;31mBAD\033[0m \033[30m|\033[0m ' + proxy.ljust(22) + f'\033[30m({dnsbl})\033[0m')
print('\033[1;31mBAD\033[0m \033[30m|\033[0m ' + proxy.ljust(22) + f'\033[30m({blackhole})\033[0m')
break
else:
print('\033[1;32mGOOD\033[0m \033[30m|\033[0m ' + proxy)
good.append(proxy)
# Main
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('FloodBL Blackhole Checker'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
parser = argparse.ArgumentParser(usage='%(prog)s <input> <output> [options]')
parser.add_argument('input', help='file to scan')
parser.add_argument('output', help='file to output')
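
As background on the lookup check() performs: a DNS blackhole list is queried by reversing the IPv4 octets and prepending them to the zone, and under the usual DNSBL convention an A-record answer means the address is listed while NXDOMAIN means it is clean. A standalone sketch of that convention, with an illustrative zone rather than the script's configured blackholes:

```python
import socket

def dnsbl_listed(ip, zone='dnsbl.dronebl.org'):  # zone name is illustrative
    '''Return True if the address resolves under the DNSBL zone (i.e. is listed).'''
    reversed_ip = '.'.join(ip.split('.')[::-1])  # 1.2.3.4 -> 4.3.2.1
    try:
        socket.gethostbyname(f'{reversed_ip}.{zone}')
        return True   # the name resolved, so the address is listed
    except socket.gaierror:
        return False  # NXDOMAIN, not listed

print(dnsbl_listed('127.0.0.2'))  # 127.0.0.2 is the conventional DNSBL test entry
```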

proxytools/sockhub.py Normal file
View File

@@ -0,0 +1,69 @@
#!/usr/bin/env python
# SockHub Proxy Scraper - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)
import os
import re
import time
import urllib.request
github_list = (
'https://raw.githubusercontent.com/officialputuid/KangProxy/KangProxy/socks5/socks5.txt',
'https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt',
'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/socks5.txt',
'https://raw.githubusercontent.com/roosterkid/openproxylist/main/SOCKS5_RAW.txt',
'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/socks5.txt',
'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks5.txt',
'https://raw.githubusercontent.com/jetkai/proxy-list/main/online-proxies/txt/proxies-socks5.txt',
'https://raw.githubusercontent.com/UptimerBot/proxy-list/main/proxies/socks5.txt',
'https://raw.githubusercontent.com/ShiftyTR/Proxy-List/master/socks5.txt',
'https://raw.githubusercontent.com/saschazesiger/Free-Proxies/master/proxies/socks5.txt'
)
def get_source(url):
    req = urllib.request.Request(url)
    req.add_header('User-Agent', 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)')
    source = urllib.request.urlopen(req, timeout=5)
    return source.read().decode()
# Main
print('#'*56)
print('#{0}#'.format(''.center(54)))
print('#{0}#'.format('SockHub Proxy Scraper'.center(54)))
print('#{0}#'.format('Developed by acidvegas in Python'.center(54)))
print('#{0}#'.format('https://git.acid.vegas/proxytools'.center(54)))
print('#{0}#'.format(''.center(54)))
print('#'*56)
bad_urls = list()
dupes = 0
proxy_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'proxies.txt')
proxy_list = list()
github_list = set(github_list) # de-duplicate the source urls
print('scanning \033[35m{0:,}\033[0m urls from list...'.format(len(github_list)))
for url in github_list:
    try:
        source = get_source(url)
        found = re.findall(r'[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+', source, re.MULTILINE)
        if found:
            print('found \033[93m{0:,}\033[0m proxies on \033[34m{1}\033[0m'.format(len(found), url))
            for proxy in found:
                if proxy not in proxy_list:
                    proxy_list.append(proxy)
                else:
                    dupes += 1
        else:
            print('no proxies found on ' + url)
    except Exception:
        bad_urls.append(url)
if bad_urls:
    print('failed to load {0:,} urls'.format(len(bad_urls)))
    for url in bad_urls:
        print('failed to load ' + url)
if proxy_list:
    if dupes:
        print('found \033[32m{0:,}\033[0m total proxies! \033[30m({1:,} duplicates removed)\033[0m'.format(len(proxy_list), dupes))
    else:
        print('found \033[32m{0:,}\033[0m total proxies!'.format(len(proxy_list)))
    proxy_list.sort()
    with open(proxy_file, 'w') as proxy__file:
        for proxy in proxy_list:
            proxy__file.write(proxy + '\n')
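
A possible refinement, not part of this commit: `proxy not in proxy_list` is a linear scan, so very large scrapes could track seen entries in a set while keeping the same counters. A short sketch of that variant with stand-in data:

```python
# hypothetical dedup variant: set membership is O(1) versus O(n) for a list
proxy_list = []
seen = set()
dupes = 0
for proxy in ('1.2.3.4:1080', '1.2.3.4:1080', '5.6.7.8:9050'):  # stand-in for scraped results
    if proxy in seen:
        dupes += 1
    else:
        seen.add(proxy)
        proxy_list.append(proxy)
proxy_list.sort()
print(f'{len(proxy_list)} unique, {dupes} duplicates')
```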

View File

@@ -1,6 +1,16 @@
#!/usr/bin/env python
# SockSpot - Developed by acidvegas in Python (https://git.acid.vegas/proxytools)
'''
This script will scan popular blogspots that post fresh proxies daily.
Edit: As of 2023, Blogspot pages no longer seem to be a reliable source for proxies.
This code is old & may be updated again in the future.
'''
import datetime
import json
import base64