Mirror of https://github.com/acidvegas/avoidr.git (synced 2024-12-29 09:56:38 +00:00)
Updated using local database for instant results
This commit is contained in:
parent 49efd1e653
commit a876e8687a

18  README.md
@@ -6,23 +6,7 @@
 ## Information

 This is still a work in progress.

-This is just a little side project I am working on that will search keywords in a database of **Autonomous System Numbers** *(ASN)*. The ASN is then turned into a list of its respective IP ranges that fall under it using the [BGP View API](https://bgpview.docs.apiary.io/).
+This is just a little side project I am working on that will search keywords in a database of **Autonomous System Numbers** *(ASN)*. The ASN is then turned into a list of its respective IP ranges that fall under it.

-Below is a list of queries we look for:
-
-```python
-['754th Electronic Systems Group', 'Air Force Systems Command', 'Army & Navy Building', 'Central Intelligence Agency', 'Defense Advanced Research Projects Agency',
-'Department of Homeland Security', 'Department of Justice', 'Department of Transportation', 'DoD Network Information Center', 'Dod Joint Spectrum Center',
-'FBI Criminal Justice Information Systems', 'Institute of Nuclear Power Operations, Inc', 'Merit Network Inc', 'NASA Ames Research Center', 'NASA Deep Space Network (DSN)',
-'NASA Goddard Space Flight Center', 'Navy Federal Credit Union', 'Navy Network Information Center', 'Nuclear Science and Technology Organisation',
-'Organization for Nuclear Research', 'Root Server Technical Operations', 'Securities & Exchange Commission', 'Securities And Exchange Commission', 'U. S. Air Force',
-'U. S. Bureau of the Census', 'U. S. Department of Transportation', 'U.S. Department of Energy', 'USAISC', 'USDOE, NV Operations Office', 'United States Antarctic Program',
-'United States Coast Guard', 'United States Geological Survey', 'United States Naval Institute', 'United States Nuclear Regulatory Commission',
-'United States Patent and Trademark Office', 'United States Postal Service', 'Internet Exchange', 'Stock Exchange','Federal Emergency Management Agency','Federal Aviation Agency',
-'Federal Energy Regulatory Commission','Federal Aviation Administration','Federal Deposit Insurance Corporation','Federal Reserve Board', 'National Aeronautics and Space Administration',
-'US National Institute of Standards & Technology','Government Telecommunications and Informatics Services','U.S. Dept. of Commerce','U.S. Center For Disease Control and Prevention',
-'U.S. Fish and Wildlife Service','Department of National Defence','U.S. Department of State','Bank of America','JPMorgan Chase & Co','Facebook Inc','Twitter Inc']
-```
-
 The ranges are all stored in a JSON file for easy parsing. Depending on what you are scanning for, this list can be altered to better suit your needs.
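For orientation, the new avoidr.py further down in this diff writes its matches to out.json as {asn: {'name': ..., 'ranges': {'4': [...], '6': [...]}}}, where each range is a CIDR string. A minimal consumer sketch under that assumption follows; the load_ranges() helper is hypothetical and not part of the repo:

```python
# Hypothetical sketch (not in the repo): flatten the out.json written by the
# new avoidr.py into plain lists of CIDR strings for whatever tool comes next.
import json

def load_ranges(path='out.json'):
    results = json.loads(open(path).read())
    v4, v6 = [], []
    for entry in results.values():
        v4 += entry['ranges'].get('4', [])
        v6 += entry['ranges'].get('6', [])
    return v4, v6

if __name__ == '__main__':
    v4, v6 = load_ranges()
    print(f'{len(v4):,} IPv4 ranges and {len(v6):,} IPv6 ranges to avoid')
```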
64836  avoidr/asn.txt
File diff suppressed because it is too large
204  avoidr/avoidr.py
@@ -1,154 +1,92 @@
 #!/usr/bin/env python
 # avoidr (masscan with exclusive exclusions) - developed by acidvegas in python (https://git.acid.vegas/avoidr)

+import hashlib
 import ipaddress
 import json
 import os
-import random
 import urllib.request
+from zipfile import ZipFile

-#try:
-#    import masscan
-#except ImportError:
-#    raise SystemExit('error: missing required \'python-masscan\' library (https://pypi.org/project/python-masscan/)')
+# Globals
+grand_total = {'4': 0, '6': 0}
+results = dict()

-reserved = {
-    '4' : {
-        '0.0.0.0/8' : '"This" network',
-        '10.0.0.0/8' : 'Private networks',
-        '100.64.0.0/10' : 'Carrier-grade NAT - RFC 6598',
-        '127.0.0.0/8' : 'Host loopback',
-        '169.254.0.0/16' : 'Link local',
-        '172.16.0.0/12' : 'Private networks',
-        '192.0.0.0/24' : 'IETF Protocol Assignments',
-        '192.0.0.0/29' : 'DS-Lite',
-        '192.0.0.170/32' : 'NAT64',
-        '192.0.0.171/32' : 'DNS64',
-        '192.0.2.0/24' : 'Documentation (TEST-NET-1)',
-        '192.31.196.0/24' : 'AS112-v4',
-        '192.52.193.0/24' : 'AMT',
-        '192.88.99.0/24' : '6to4 Relay Anycast',
-        '192.168.0.0/16' : 'Private networks',
-        '192.175.48.0/24' : 'AS112 Service',
-        '198.18.0.0/15' : 'Benchmarking',
-        '198.51.100.0/24' : 'Documentation (TEST-NET-2)',
-        '203.0.113.0/24' : 'Documentation (TEST-NET-3)',
-        '224.0.0.0/4' : 'IP Multicast',
-        '233.252.0.0/24' : 'MCAST-TEST-NET',
-        '240.0.0.0/4' : 'Reserved',
-        '255.255.255.255/32' : 'Limited Broadcast'
-    },
-    '6': {
-        '::/128' : 'Unspecified address',
-        '::1/128' : 'Loopback address',
-        '::ffff:0:0/96' : 'IPv4-mapped addresses',
-        '::ffff:0:0:0/96' : 'IPv4 translated addresses',
-        '64:ff9b::/96' : 'IPv4/IPv6 translation',
-        '64:ff9b:1::/48' : 'IPv4/IPv6 translation',
-        '100::/64' : 'Discard prefix',
-        '2001:0000::/32' : 'Teredo tunneling',
-        '2001:20::/28' : 'ORCHIDv2',
-        '2001:db8::/32' : 'Addresses used in documentation and example source code',
-        '2002::/16' : 'The 6to4 addressing scheme (deprecated)',
-        'fc00::/7' : 'Unique local address',
-        'fe80::/64' : 'Link-local address',
-        'ff00::/8' : 'Multicast address'
-    }
-}
+def calculate_hash(path):
+    hash_sha1 = hashlib.sha1()
+    with open(path, 'rb') as f:
+        for chunk in iter(lambda: f.read(4096), b''):
+            hash_sha1.update(chunk)
+    return hash_sha1.hexdigest()

-asn_queries = ['754th Electronic Systems Group', 'Air Force Systems Command', 'Army & Navy Building', 'Central Intelligence Agency', 'Defense Advanced Research Projects Agency',
-'Department of Homeland Security', 'Department of Justice', 'Department of Transportation', 'DoD Network Information Center', 'Dod Joint Spectrum Center',
-'FBI Criminal Justice Information Systems', 'Institute of Nuclear Power Operations, Inc', 'Merit Network Inc', 'NASA Ames Research Center', 'NASA Deep Space Network (DSN)',
-'NASA Goddard Space Flight Center', 'Navy Federal Credit Union', 'Navy Network Information Center', 'Nuclear Science and Technology Organisation',
-'Organization for Nuclear Research', 'Root Server Technical Operations', 'Securities & Exchange Commission', 'Securities And Exchange Commission', 'U. S. Air Force',
-'U. S. Bureau of the Census', 'U. S. Department of Transportation', 'U.S. Department of Energy', 'USAISC', 'USDOE, NV Operations Office', 'United States Antarctic Program',
-'United States Coast Guard', 'United States Geological Survey', 'United States Naval Institute', 'United States Nuclear Regulatory Commission',
-'United States Patent and Trademark Office', 'United States Postal Service', 'Internet Exchange', 'Stock Exchange','Federal Emergency Management Agency','Federal Aviation Agency',
-'Federal Energy Regulatory Commission','Federal Aviation Administration','Federal Deposit Insurance Corporation','Federal Reserve Board', 'National Aeronautics and Space Administration',
-'US National Institute of Standards & Technology','Government Telecommunications and Informatics Services','U.S. Dept. of Commerce','U.S. Center For Disease Control and Prevention',
-'U.S. Fish and Wildlife Service','Department of National Defence','U.S. Department of State','Bank of America','JPMorgan Chase & Co','Facebook Inc','Twitter Inc']
+def get_url(url, git=False):
+    data = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'}
+    if git:
+        data['Accept'] = 'application/vnd.github.v3+json'
+    req = urllib.request.Request(url, headers=data)
+    return urllib.request.urlopen(req, timeout=10).read().decode()

-def ASNquery(asn):
-    head = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'}
-    req = urllib.request.Request(f'https://api.bgpview.io/asn/{asn[2:]}', headers=head)
-    data = json.loads(urllib.request.urlopen(req).read())
-    return (data['data']['name'], data['data']['description_short'])
+def update_database():
+    DB = 'databases/fullASN.json.zip'
+    try:
+        os.mkdir('databases')
+    except FileExistsError:
+        pass
+    if os.path.exists(DB):
+        old_hash = calculate_hash(DB)
+        new_hash = json.loads(get_url('https://api.github.com/repos/ipapi-is/ipapi/contents/'+DB))['sha']
+        if old_hash != new_hash:
+            print('[~] New database version available! Downloading...')
+            os.remove(DB)
+            if os.path.exists(DB[:-4]):
+                os.remove(DB[:-4])
+            urllib.request.urlretrieve('https://github.com/ipapi-is/ipapi/raw/main/'+DB, DB)
+            with ZipFile(DB) as zObject:
+                zObject.extract(DB[10:-4], 'databases')
+    else:
+        print('[~] Downloading missing database...')
+        urllib.request.urlretrieve('https://github.com/ipapi-is/ipapi/raw/main/'+DB, DB)
+        if os.path.exists(DB[:-4]):
+            os.remove(DB[:-4])
+        with ZipFile(DB) as zObject:
+            zObject.extract(DB[10:-4], 'databases')

-def ASNranges(asn, desc):
-    head = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'}
-    req = urllib.request.Request(f'https://api.bgpview.io/asn/{asn[2:]}/prefixes', headers=head)
-    data = json.loads(urllib.request.urlopen(req).read())
-    ranges = dict()
-    for version in ('4','6'):
-        if pdata := [x['prefix'] for x in data['data'][f'ipv{version}_prefixes']]:
-            ranges[version] = pdata
-    return ranges
+def process_asn(data):
+    if data['asn'] not in results:
+        title = data['descr'] if 'org' not in data else data['descr'] + ' / ' + data['org']
+        results[data['asn']] = {'name': title, 'ranges': dict()}
+        if 'prefixes' in data:
+            results[data['asn']]['ranges']['4'] = data['prefixes']
+            total = total_ips(data['prefixes'])
+            grand_total['4'] += total
+            print('Found \033[93mAS{0}\033[0m \033[1;30m({1})\033[0m containing \033[32m{2:,}\033[0m IPv4 ranges with \033[36m{3:,}\033[0m total IP addresses'.format(data['asn'], title, len(data['prefixes']), total))
+        if 'prefixesIPv6' in data:
+            results[data['asn']]['ranges']['6'] = data['prefixesIPv6']
+            total = total_ips(data['prefixesIPv6'])
+            grand_total['6'] += total
+            print('Found \033[93mAS{0}\033[0m \033[1;30m({1})\033[0m containing \033[32m{2:,}\033[0m IPv6 ranges with \033[36m{3:,}\033[0m total IP addresses'.format(data['asn'], title, len(data['prefixesIPv6']), total))

-class Parser:
-    def microsoft_office():
-        urls = (
-            'https://endpoints.office.com/endpoints/USGOVDoD?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7',
-            'https://endpoints.office.com/endpoints/USGOVGCCHigh?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7',
-            'https://endpoints.office.com/endpoints/worldwide?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7',
-            'https://endpoints.office.com/endpoints/China?clientrequestid=b10c5ed1-bad1-445f-b386-b919946339a7'
-        )
-        ranges = {'IPv4': list(), 'IPv6': list()}
-        for url in urls:
-            data = json.loads(urllib.request.urlopen(url).read())
-            all_ranges = [item for sublist in [item['ips'] for item in data if 'ips' in item] for item in sublist]
-            ranges['IPv4'] += [item for item in all_ranges if ':' not in item]
-            ranges['IPv6'] += [item for item in all_ranges if ':' in item]
-        return ranges
-
-    def google(): # NOTE: These are non-cloud ranges
-        data = json.loads(urllib.request.urlopen('https://www.gstatic.com/ipranges/goog.json').read().decode())
-        ranges = {'4': list(), '6': list()}
-        ranges['4'] += [item['ipv4Prefix'] for item in data['prefixes'] if 'ipv4Prefix' in item]
-        ranges['6'] += [item['ipv6Prefix'] for item in data['prefixes'] if 'ipv6Prefix' in item]
-        return ranges
+def total_ips(ranges, total=0):
+    for _range in ranges:
+        total += ipaddress.ip_network(_range).num_addresses
+    return total

 # Main
-bad_asn = json.loads(open('bad.json').read()) if os.path.isfile('bad.json') else dict()
-asn_list = open('asn.txt').readlines()
-bad_list = dict()
-database = dict()
-grand_total = {'4': 0, '6': 0}
-for item in asn_list:
-    item = item.rstrip()
-    for query in asn_queries:
-        if query.lower() in item.lower():
-            asn = item.split()[0]
-            desc = item.split(' - ')[1] if ' - ' in item else ' '.join(item.split()[2:])
-            if asn in bad_asn:
-                print('Skipping bad ASN... ('+asn+')')
-            else:
-                found = ASNranges(asn, desc)
-                if found:
-                    for version in found:
-                        total = 0
-                        for ranges in found[version]:
-                            total += ipaddress.ip_network(ranges).num_addresses
-                            grand_total[version] += ipaddress.ip_network(ranges).num_addresses
-                        print(f'Found \033[32m{len(found[version]):,}\033[0m IPv{version} ranges \033[1;30m({total:,})\033[0m on \033[93m{asn}\033[0m \033[1;30m({desc})\033[0m')
-                    database[asn] = {'desc': desc, 'ranges': found}
-                else:
-                    print(f'Found \033[1;31m0\033[0m IP ranges on \033[93m{asn}\033[0m \033[1;30m({desc})\033[0m')
-                    bad_list[asn] = desc
-database['reserved'] = {'4': reserved['4'],'6': reserved['6']}
-for version in database['reserved']:
-    total = 0
-    for ranges in database['reserved'][version]:
-        total += ipaddress.ip_network(ranges).num_addresses
-        grand_total[version] += ipaddress.ip_network(ranges).num_addresses
-    print('Found \033[32m{0:,}\033[0m IPv{1} ranges \033[1;30m({2:,})\033[0m on \033[93mRESERVED\033[0m \033[1;30m({3})\033[0m'.format(len(database['reserved'][version]), version, total, database['reserved'][version][ranges]))
-with open('db.json', 'w') as fp:
-    json.dump(database, fp)
-with open('bad.json', 'w') as fp:
-    json.dump(bad_list, fp)
+print('[~] Checking for database updates...')
+update_database()
+data = json.loads(open('databases/fullASN.json').read())
+queries = [item.rstrip() for item in open('custom.txt').readlines()]
+print(f'[~] Searching {len(queries):,} queries against {len(data):,} ASNs...')
+for item in data:
+    for field in [x for x in data[item] if x in ('descr','org')]:
+        if [x for x in queries if x.lower() in data[item][field].lower()]:
+            process_asn(data[item])
+            break
+with open('out.json', 'w') as fp:
+    json.dump(results, fp)
 total_v4 = ipaddress.ip_network('0.0.0.0/0').num_addresses
 total_v6 = ipaddress.ip_network('::/0').num_addresses
 print('Total IPv4 Addresses : {0:,}'.format(total_v4))
 print('Total IPv4 After Clean : {0:,}'.format(total_v4-grand_total['4']))
 print('Total IPv6 Addresses : {0:,}'.format(total_v6))
 print('Total IPv6 After Clean : {0:,}'.format(total_v6-grand_total['6']))
-
-#mas = masscan.PortScanner() mas.scan('172.0.8.78/24', ports='22,80,8080', arguments='--max-rate 1000') print(mas.scan_result)
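The commented-out masscan hook from the old version is gone, so wiring the results back into a scan is left to the user. A rough sketch of one way to do that, assuming the out.json produced above and masscan's --excludefile option; exclude.conf is just a name picked for illustration, and only the IPv4 ranges are written since IPv6 support varies by masscan version:

```python
# Hypothetical glue step (not part of this commit): dump every matched IPv4
# range from out.json into a one-range-per-line file that masscan can exclude.
import json

results = json.loads(open('out.json').read())

with open('exclude.conf', 'w') as fp:
    for entry in results.values():
        for cidr in entry['ranges'].get('4', []):
            fp.write(cidr + '\n')

# Example invocation (assumes masscan is installed and run with privileges):
#   masscan 0.0.0.0/0 -p80 --max-rate 1000 --excludefile exclude.conf
```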
63  avoidr/custom.txt  Normal file
@@ -0,0 +1,63 @@
+754th Electronic Systems Group
+Air Force Systems Command
+Army & Navy Building
+Autonomous nonprofit organisation Computer Incident Response Center
+Bank of America
+Central Intelligence Agency
+Defense Advanced Research Projects Agency
+Department of Homeland Security
+Department of Justice
+Department of National Defence
+Department of Transportation
+Dept. of Information Technology & Cyber Security
+DoD Network Information Center
+Dod Joint Spectrum Center
+FBI Criminal Justice Information Systems
+Facebook Inc
+Federal Aviation Administration
+Federal Aviation Agency
+Federal Deposit Insurance Corporation
+Federal Emergency Management Agency
+Federal Energy Regulatory Commission
+Federal Reserve Board
+GitHub, Inc
+Government Telecommunications and Informatics Services
+ICANN
+Institute of Nuclear Power Operations
+Internet Exchange
+InterNIC Registration Services
+JPMorgan Chase & Co
+Merit Network Inc
+NASA Ames Research Center
+NASA Deep Space Network
+NASA Goddard Space Flight Center
+National Aeronautics and Space Administration
+National Telecommunications
+Navy Federal Credit Union
+Navy Network Information Center
+Nuclear Science and Technology Organisation
+Organization for Nuclear Research
+Packet Clearing House
+Root Server Technical Operations
+Securities & Exchange Commission
+Securities And Exchange Commission
+Stock Exchange
+Twitter Inc
+U. S. Air Force
+U. S. Bureau of the Census
+U. S. Department of Transportation
+U.S. Center For Disease Control and Prevention
+U.S. Department of Energy
+U.S. Department of State
+U.S. Dept. of Commerce
+U.S. Fish and Wildlife Service
+US National Institute of Standards & Technology
+USAISC
+USDOE, NV Operations Office
+United States Antarctic Program
+United States Coast Guard
+United States Geological Survey
+United States Naval Institute
+United States Nuclear Regulatory Commission
+United States Patent and Trademark Office
+United States Postal Service
2292266  avoidr/databases/fullASN.json  Normal file
File diff suppressed because it is too large
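The database itself is too large to render here, so its schema is not visible in this diff. The snippet below only shows the fields the new avoidr.py actually reads (asn, descr, org, prefixes, prefixesIPv6) with made-up values, and replays the same case-insensitive substring match used in the main loop; the example record is illustrative, not taken from the real database:

```python
# Made-up record shaped like what avoidr.py expects from databases/fullASN.json;
# only the fields the script touches are shown.
example = {
    '3598': {
        'asn': 3598,
        'descr': 'MICROSOFT-CORP-AS',
        'org': 'Microsoft Corporation',
        'prefixes': ['131.107.0.0/16'],
        'prefixesIPv6': ['2620:0:30::/48']
    }
}

queries = ['Microsoft']  # one line of custom.txt

# Same matching logic as the new main loop: case-insensitive substring search
# against the descr/org fields of every record.
for item in example:
    for field in [x for x in example[item] if x in ('descr', 'org')]:
        if [x for x in queries if x.lower() in example[item][field].lower()]:
            print('match: AS{0}'.format(example[item]['asn']))
            break
```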
BIN  avoidr/databases/fullASN.json.zip  Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-# avoidr (masscan with exclusive exclusions) - developed by acidvegas in python (https://git.acid.vegas/avoidr)
-
-asn = open('asn.txt').readlines()
-
-while True:
-    query = input('Search: ')
-    for i in asn:
-        if query.lower() in i.lower():
-            print(i.rstrip())