mirror of https://github.com/acidvegas/avoidr.git synced 2024-12-28 01:16:38 +00:00

Updated to use a local database for instant results

This commit is contained in:
Dionysus 2023-07-19 17:46:54 -04:00
parent 49efd1e653
commit a876e8687a
Signed by: acidvegas
GPG Key ID: EF4B922DB85DC9DE
8 changed files with 2292401 additions and 64997 deletions


@@ -6,23 +6,7 @@
## Information
This is still a work in progress.
This is a little side project I am working on that searches keywords against a database of **Autonomous System Numbers** *(ASN)*. Each matching ASN is then turned into a list of the IP ranges that fall under it using the [BGP View API](https://bgpview.docs.apiary.io/).
Below is a list of queries we look for:
```python
['754th Electronic Systems Group', 'Air Force Systems Command', 'Army & Navy Building', 'Central Intelligence Agency', 'Defense Advanced Research Projects Agency',
'Department of Homeland Security', 'Department of Justice', 'Department of Transportation', 'DoD Network Information Center', 'Dod Joint Spectrum Center',
'FBI Criminal Justice Information Systems', 'Institute of Nuclear Power Operations, Inc', 'Merit Network Inc', 'NASA Ames Research Center', 'NASA Deep Space Network (DSN)',
'NASA Goddard Space Flight Center', 'Navy Federal Credit Union', 'Navy Network Information Center', 'Nuclear Science and Technology Organisation',
'Organization for Nuclear Research', 'Root Server Technical Operations', 'Securities & Exchange Commission', 'Securities And Exchange Commission', 'U. S. Air Force',
'U. S. Bureau of the Census', 'U. S. Department of Transportation', 'U.S. Department of Energy', 'USAISC', 'USDOE, NV Operations Office', 'United States Antarctic Program',
'United States Coast Guard', 'United States Geological Survey', 'United States Naval Institute', 'United States Nuclear Regulatory Commission',
'United States Patent and Trademark Office', 'United States Postal Service', 'Internet Exchange', 'Stock Exchange','Federal Emergency Management Agency','Federal Aviation Agency',
'Federal Energy Regulatory Commission','Federal Aviation Administration','Federal Deposit Insurance Corporation','Federal Reserve Board', 'National Aeronautics and Space Administration',
'US National Institute of Standards & Technology','Government Telecommunications and Informatics Services','U.S. Dept. of Commerce','U.S. Center For Disease Control and Prevention',
'U.S. Fish and Wildlife Service','Department of National Defence','U.S. Department of State','Bank of America','JPMorgan Chase & Co','Facebook Inc','Twitter Inc']
```
This is a little side project I am working on that searches keywords against a database of **Autonomous System Numbers** *(ASN)*. Each matching ASN is then turned into a list of the IP ranges that fall under it.
The ranges are all stored in a JSON file for easy parsing. Depending on what you are scanning for, this list can be altered to better suit your needs.
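For parsing, here is a minimal sketch that flattens the generated JSON into one CIDR per line (suitable for masscan's `--excludefile`), assuming the `{asn: {'name': ..., 'ranges': {'4': [...], '6': [...]}}}` layout that `avoidr.py` writes to `out.json`:
```python
import json

# Hypothetical helper: flatten out.json into one CIDR per line.
with open('out.json') as fp:
    data = json.load(fp)

with open('exclude.conf', 'w') as fp:
    for asn, info in data.items():
        for version, prefixes in info['ranges'].items():
            for prefix in prefixes:
                fp.write(prefix + '\n')
```
The resulting `exclude.conf` can then be passed to masscan with `--excludefile exclude.conf`.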

File diff suppressed because it is too large.


@@ -1,154 +1,92 @@
#!/usr/bin/env python
# avoidr (masscan with exclusive exclusions) - developed by acidvegas in python (https://git.acid.vegas/avoidr)
import hashlib
import ipaddress
import json
import os
import random
import urllib.request
from zipfile import ZipFile
#try:
# import masscan
#except ImportError:
# raise SystemExit('error: missing required \'python-masscan\' library (https://pypi.org/project/python-masscan/)')
# Globals
grand_total = {'4': 0, '6': 0}
results = dict()
reserved = {
'4' : {
'0.0.0.0/8' : '"This" network',
'10.0.0.0/8' : 'Private networks',
'100.64.0.0/10' : 'Carrier-grade NAT - RFC 6598',
'127.0.0.0/8' : 'Host loopback',
'169.254.0.0/16' : 'Link local',
'172.16.0.0/12' : 'Private networks',
'192.0.0.0/24' : 'IETF Protocol Assignments',
'192.0.0.0/29' : 'DS-Lite',
'192.0.0.170/32' : 'NAT64',
'192.0.0.171/32' : 'DNS64',
'192.0.2.0/24' : 'Documentation (TEST-NET-1)',
'192.31.196.0/24' : 'AS112-v4',
'192.52.193.0/24' : 'AMT',
'192.88.99.0/24' : '6to4 Relay Anycast',
'192.168.0.0/16' : 'Private networks',
'192.175.48.0/24' : 'AS112 Service',
'198.18.0.0/15' : 'Benchmarking',
'198.51.100.0/24' : 'Documentation (TEST-NET-2)',
'203.0.113.0/24' : 'Documentation (TEST-NET-3)',
'224.0.0.0/4' : 'IP Multicast',
'233.252.0.0/24' : 'MCAST-TEST-NET',
'240.0.0.0/4' : 'Reserved',
'255.255.255.255/32' : 'Limited Broadcast'
},
'6': {
'::/128' : 'Unspecified address',
'::1/128' : 'Loopback address',
'::ffff:0:0/96' : 'IPv4-mapped addresses',
'::ffff:0:0:0/96' : 'IPv4 translated addresses',
'64:ff9b::/96' : 'IPv4/IPv6 translation',
'64:ff9b:1::/48' : 'IPv4/IPv6 translation',
'100::/64' : 'Discard prefix',
'2001:0000::/32' : 'Teredo tunneling',
'2001:20::/28' : 'ORCHIDv2',
'2001:db8::/32' : 'Addresses used in documentation and example source code',
'2002::/16' : 'The 6to4 addressing scheme (deprecated)',
'fc00::/7' : 'Unique local address',
'fe80::/64' : 'Link-local address',
'ff00::/8' : 'Multicast address'
}
}
def calculate_hash(path):
hash_sha1 = hashlib.sha1()
with open(path, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b''):
hash_sha1.update(chunk)
return hash_sha1.hexdigest()
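
# Note: the following helper is a sketch added for illustration and is not part of the
# original file. The GitHub contents API used in update_database() returns the git *blob*
# SHA (the SHA-1 of b'blob <size>\0' + contents), not a plain SHA-1 of the file bytes, so
# a blob-style hash is what would match the API's 'sha' field directly:
def calculate_git_blob_hash(path):
    hash_sha1 = hashlib.sha1(f'blob {os.path.getsize(path)}\0'.encode())
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            hash_sha1.update(chunk)
    return hash_sha1.hexdigest()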
asn_queries = ['754th Electronic Systems Group', 'Air Force Systems Command', 'Army & Navy Building', 'Central Intelligence Agency', 'Defense Advanced Research Projects Agency',
'Department of Homeland Security', 'Department of Justice', 'Department of Transportation', 'DoD Network Information Center', 'Dod Joint Spectrum Center',
'FBI Criminal Justice Information Systems', 'Institute of Nuclear Power Operations, Inc', 'Merit Network Inc', 'NASA Ames Research Center', 'NASA Deep Space Network (DSN)',
'NASA Goddard Space Flight Center', 'Navy Federal Credit Union', 'Navy Network Information Center', 'Nuclear Science and Technology Organisation',
'Organization for Nuclear Research', 'Root Server Technical Operations', 'Securities & Exchange Commission', 'Securities And Exchange Commission', 'U. S. Air Force',
'U. S. Bureau of the Census', 'U. S. Department of Transportation', 'U.S. Department of Energy', 'USAISC', 'USDOE, NV Operations Office', 'United States Antarctic Program',
'United States Coast Guard', 'United States Geological Survey', 'United States Naval Institute', 'United States Nuclear Regulatory Commission',
'United States Patent and Trademark Office', 'United States Postal Service', 'Internet Exchange', 'Stock Exchange','Federal Emergency Management Agency','Federal Aviation Agency',
'Federal Energy Regulatory Commission','Federal Aviation Administration','Federal Deposit Insurance Corporation','Federal Reserve Board', 'National Aeronautics and Space Administration',
'US National Institute of Standards & Technology','Government Telecommunications and Informatics Services','U.S. Dept. of Commerce','U.S. Center For Disease Control and Prevention',
'U.S. Fish and Wildlife Service','Department of National Defence','U.S. Department of State','Bank of America','JPMorgan Chase & Co','Facebook Inc','Twitter Inc']
def get_url(url, git=False):
data = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'}
if git:
data['Accept'] = 'application/vnd.github.v3+json'
req = urllib.request.Request(url, headers=data)
return urllib.request.urlopen(req, timeout=10).read().decode()
def ASNquery(asn):
head = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'}
req = urllib.request.Request(f'https://api.bgpview.io/asn/{asn[2:]}', headers=head)
data = json.loads(urllib.request.urlopen(req).read())
return (data['data']['name'], data['data']['description_short'])
def update_database():
DB = 'databases/fullASN.json.zip'
try:
os.mkdir('databases')
except FileExistsError:
pass
if os.path.exists(DB):
old_hash = calculate_hash(DB)
new_hash = json.loads(get_url('https://api.github.com/repos/ipapi-is/ipapi/contents/'+DB))['sha']
if old_hash != new_hash:
print('[~] New database version available! Downloading...')
os.remove(DB)
if os.path.exists(DB[:-4]):
os.remove(DB[:-4])
urllib.request.urlretrieve('https://github.com/ipapi-is/ipapi/raw/main/'+DB, DB)
with ZipFile(DB) as zObject:
zObject.extract(DB[10:-4], 'databases')
else:
print('[~] Downloading missing database...')
urllib.request.urlretrieve('https://github.com/ipapi-is/ipapi/raw/main/'+DB, DB)
if os.path.exists(DB[:-4]):
os.remove(DB[:-4])
with ZipFile(DB) as zObject:
zObject.extract(DB[10:-4], 'databases')
def ASNranges(asn, desc):
head = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'}
req = urllib.request.Request(f'https://api.bgpview.io/asn/{asn[2:]}/prefixes', headers=head)
data = json.loads(urllib.request.urlopen(req).read())
ranges = dict()
for version in ('4','6'):
if pdata := [x['prefix'] for x in data['data'][f'ipv{version}_prefixes']]:
ranges[version] = pdata
return ranges
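
# Illustrative return shape (example prefixes are documentation ranges, not real data):
#   ASNranges('AS64500', 'Example Org') -> {'4': ['198.51.100.0/24'], '6': ['2001:db8::/32']}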
def process_asn(data):
if data['asn'] not in results:
title = data['descr'] if 'org' not in data else data['descr'] + ' / ' + data['org']
results[data['asn']] = {'name': title, 'ranges': dict()}
if 'prefixes' in data:
results[data['asn']]['ranges']['4'] = data['prefixes']
total = total_ips(data['prefixes'])
grand_total['4'] += total
print('Found \033[93mAS{0}\033[0m \033[1;30m({1})\033[0m containing \033[32m{2:,}\033[0m IPv4 ranges with \033[36m{3:,}\033[0m total IP addresses'.format(data['asn'], title, len(data['prefixes']), total))
if 'prefixesIPv6' in data:
results[data['asn']]['ranges']['6'] = data['prefixesIPv6']
total = total_ips(data['prefixesIPv6'])
grand_total['6'] += total
print('Found \033[93mAS{0}\033[0m \033[1;30m({1})\033[0m containing \033[32m{2:,}\033[0m IPv6 ranges with \033[36m{3:,}\033[0m total IP addresses'.format(data['asn'], title, len(data['prefixesIPv6']), total))
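
# Expected record shape in the local fullASN.json database, as implied by the fields that
# process_asn() reads (descriptive sketch only, values are placeholders):
#   {'asn': 64500, 'descr': '...', 'org': '...',
#    'prefixes': ['198.51.100.0/24', ...], 'prefixesIPv6': ['2001:db8::/32', ...]}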
class Parser:
def microsoft_office():
urls = (
'https://endpoints.office.com/endpoints/USGOVDoD?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7',
'https://endpoints.office.com/endpoints/USGOVGCCHigh?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7',
'https://endpoints.office.com/endpoints/worldwide?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7',
'https://endpoints.office.com/endpoints/China?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7'
)
ranges = {'IPv4': list(), 'IPv6': list()}
for url in urls:
data = json.loads(urllib.request.urlopen(url).read())
all_ranges = [item for sublist in [item['ips'] for item in data if 'ips' in item] for item in sublist]
ranges['IPv4'] += [item for item in all_ranges if ':' not in item]
ranges['IPv6'] += [item for item in all_ranges if ':' in item]
return ranges
def google(): # NOTE: These are non-cloud ranges
data = json.loads(urllib.request.urlopen('https://www.gstatic.com/ipranges/goog.json').read().decode())
ranges = {'4': list(), '6': list()}
ranges['4'] += [item['ipv4Prefix'] for item in data['prefixes'] if 'ipv4Prefix' in item]
ranges['6'] += [item['ipv6Prefix'] for item in data['prefixes'] if 'ipv6Prefix' in item]
return ranges
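
# The Parser helpers above are not wired into the main flow below; a hedged example of how
# their output could be folded into the exclusion results:
#extra = Parser.google()
#results['google'] = {'name': 'Google (non-cloud)', 'ranges': extra}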
def total_ips(ranges, total=0):
for _range in ranges:
total += ipaddress.ip_network(_range).num_addresses
return total
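
# Quick sanity check for total_ips():
#   total_ips(['10.0.0.0/8', '192.168.0.0/16']) == 16_777_216 + 65_536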
# Main
bad_asn = json.loads(open('bad.json').read()) if os.path.isfile('bad.json') else dict()
asn_list = open('asn.txt').readlines()
bad_list = dict()
database = dict()
grand_total = {'4': 0, '6': 0}
for item in asn_list:
item = item.rstrip()
for query in asn_queries:
if query.lower() in item.lower():
asn = item.split()[0]
desc = item.split(' - ')[1] if ' - ' in item else ' '.join(item.split()[2:])
if asn in bad_asn:
print('Skipping bad ASN... ('+asn+')')
else:
found = ASNranges(asn, desc)
if found:
for version in found:
total = 0
for ranges in found[version]:
total += ipaddress.ip_network(ranges).num_addresses
grand_total[version] += ipaddress.ip_network(ranges).num_addresses
print(f'Found \033[32m{len(found[version]):,}\033[0m IPv{version} ranges \033[1;30m({total:,})\033[0m on \033[93m{asn}\033[0m \033[1;30m({desc})\033[0m')
database[asn] = {'desc': desc, 'ranges': found}
else:
print(f'Found \033[1;31m0\033[0m IP ranges on \033[93m{asn}\033[0m \033[1;30m({desc})\033[0m')
bad_list[asn] = desc
database['reserved'] = {'4': reserved['4'],'6': reserved['6']}
for version in database['reserved']:
total = 0
for ranges in database['reserved'][version]:
total += ipaddress.ip_network(ranges).num_addresses
grand_total[version] += ipaddress.ip_network(ranges).num_addresses
print('Found \033[32m{0:,}\033[0m IPv{1} ranges \033[1;30m({2:,})\033[0m on \033[93mRESERVED\033[0m \033[1;30m({3})\033[0m'.format(len(database['reserved'][version]), version, total, database['reserved'][version][ranges]))
with open('db.json', 'w') as fp:
json.dump(database, fp)
with open('bad.json', 'w') as fp:
json.dump(bad_list, fp)
print('[~] Checking for database updates...')
update_database()
data = json.loads(open('databases/fullASN.json').read())
queries = [item.rstrip() for item in open('custom.txt').readlines()]
print(f'[~] Searching {len(queries):,} queries against {len(data):,} ASNs...')
for item in data:
for field in [x for x in data[item] if x in ('descr','org')]:
if [x for x in queries if x.lower() in data[item][field].lower()]:
process_asn(data[item])
break
with open('out.json', 'w') as fp:
json.dump(results, fp)
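
# For context: 0.0.0.0/0 spans 2**32 (4,294,967,296) addresses and ::/0 spans 2**128,
# so the figures below show how much address space remains after excluding the matched ranges.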
total_v4 = ipaddress.ip_network('0.0.0.0/0').num_addresses
total_v6 = ipaddress.ip_network('::/0').num_addresses
print('Total IPv4 Addresses : {0:,}'.format(total_v4))
print('Total IPv4 After Clean : {0:,}'.format(total_v4-grand_total['4']))
print('Total IPv6 Addresses : {0:,}'.format(total_v6))
print('Total IPv6 After Clean : {0:,}'.format(total_v6-grand_total['6']))
#mas = masscan.PortScanner()
#mas.scan('172.0.8.78/24', ports='22,80,8080', arguments='--max-rate 1000')
#print(mas.scan_result)

avoidr/custom.txt (new file, 63 lines)

@@ -0,0 +1,63 @@
754th Electronic Systems Group
Air Force Systems Command
Army & Navy Building
Autonomous nonprofit organisation Computer Incident Response Center
Bank of America
Central Intelligence Agency
Defense Advanced Research Projects Agency
Department of Homeland Security
Department of Justice
Department of National Defence
Department of Transportation
Dept. of Information Technology & Cyber Security
DoD Network Information Center
Dod Joint Spectrum Center
FBI Criminal Justice Information Systems
Facebook Inc
Federal Aviation Administration
Federal Aviation Agency
Federal Deposit Insurance Corporation
Federal Emergency Management Agency
Federal Energy Regulatory Commission
Federal Reserve Board
GitHub, Inc
Government Telecommunications and Informatics Services
ICANN
Institute of Nuclear Power Operations
Internet Exchange
InterNIC Registration Services
JPMorgan Chase & Co
Merit Network Inc
NASA Ames Research Center
NASA Deep Space Network
NASA Goddard Space Flight Center
National Aeronautics and Space Administration
National Telecommunications
Navy Federal Credit Union
Navy Network Information Center
Nuclear Science and Technology Organisation
Organization for Nuclear Research
Packet Clearing House
Root Server Technical Operations
Securities & Exchange Commission
Securities And Exchange Commission
Stock Exchange
Twitter Inc
U. S. Air Force
U. S. Bureau of the Census
U. S. Department of Transportation
U.S. Center For Disease Control and Prevention
U.S. Department of Energy
U.S. Department of State
U.S. Dept. of Commerce
U.S. Fish and Wildlife Service
US National Institute of Standards & Technology
USAISC
USDOE, NV Operations Office
United States Antarctic Program
United States Coast Guard
United States Geological Survey
United States Naval Institute
United States Nuclear Regulatory Commission
United States Patent and Trademark Office
United States Postal Service

avoidr/databases/fullASN.json (2292266 lines)

File diff suppressed because it is too large.

Binary file not shown.

File diff suppressed because one or more lines are too long


@@ -1,10 +0,0 @@
#!/usr/bin/env python
# avoidr (masscan with exclusive exclusions) - developed by acidvegas in python (https://git.acid.vegas/avoidr)
asn = open('asn.txt').readlines()
while True:
query = input('Search: ')
for i in asn:
if query.lower() in i.lower():
print(i.rstrip())
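
# Example: entering a keyword such as 'NASA' prints every line of asn.txt that contains it
# (case-insensitive); press Ctrl+C to exit the loop.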