Memory efficiency attained via generators for handling larger input files. Code cleaned up, etc.
This commit is contained in:
parent a3e5a3fef7
commit a69ba1cbc8
12
README.md
@@ -16,21 +16,17 @@ The program will start firing off DNS queries to all the resolvers using the cust

![](.screens/preview.png)

After testing across multiple IP addresses over time, if we ever see `download.event.supernets.org` show up on any passive DNS lookup engines, we can simply use the following command:

```bash
jq 'to_entries | map({key: .value, value: .key}) | from_entries | ."download.event"' dns_keys.txt
```

This will return `151.202.0.84`, marking it as a DNS server that is actively logging all DNS queries that pass through.
After testing across multiple IP addresses over time, if we ever see `download.event.supernets.org` show up on any passive DNS lookup engines, we can refer to our logs, which will show it was looked up on `151.202.0.84`, marking it as a DNS server that is actively logging all DNS queries that pass through.
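As a side note, the same lookup can be done without `jq`; the sketch below is a rough Python equivalent, assuming `dns_keys.txt` holds the JSON mapping of resolver IP to generated subdomain implied by the command above.

```python
import json

# Assumed layout of dns_keys.txt (as implied by the jq one-liner above):
# {"151.202.0.84": "download.event", ...}
with open('dns_keys.txt', 'r') as fp:
    dns_keys = json.load(fp)

# Invert the mapping (subdomain -> resolver) and look up the leaked name.
by_subdomain = {subdomain: resolver for resolver, subdomain in dns_keys.items()}
print(by_subdomain.get('download.event'))  # -> 151.202.0.84
```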
## WORK IN PROGRESS (STAY TUNED)

- [ ] Bind server running, accepting wildcard DNS lookups on a custom domain.
- [ ] DNS-over-TLS *(DoT)* and DNS-over-HTTPS *(DoH)* support
- [X] Hunt down specific DNS servers used by ISPs from an ASN lookup
- [ ] Any way to apply this to custom DNS servers used by VPNs?
- [X] Any way to apply this to custom DNS servers used by VPNs?
- [X] Noise generator to abuse known logging servers.
- [X] Memory efficiency attained via yielding generators to handle large input files *(sketched below)*

This is all very theoretical right now, interested to see how this pans out.
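On the memory-efficiency item above, the idea is simply to yield lines from the input files instead of reading them into lists up front; a minimal sketch of that pattern (hypothetical file name), in the spirit of the generator helpers added in `pdknockr.py`:

```python
def stream_lines(file_path: str):
    '''Yield non-empty lines one at a time so the whole file never sits in memory.'''
    with open(file_path, 'r') as fp:
        for line in fp:
            line = line.strip()
            if line:
                yield line

# Hypothetical usage: walk a resolver list of any size with roughly constant memory.
for resolver in stream_lines('resolvers.txt'):
    print(resolver)
```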
199
pdknockr.py
@@ -1,10 +1,11 @@
#!/usr/bin/env python
# Passive DNS Knocker (PDK) - developed by acidvegas in python (https://git.acid.vegas/pdknockr)

import argparse
import asyncio
import json
import logging
import logging.handlers
import os
import random
import time

@@ -14,7 +15,7 @@ except ImportError:
    raise SystemExit('missing required \'aiodns\' module (pip install aiodns)')


async def dns_lookup(domain: str, subdomain: str, dns_server: str, dns_type: str, timeout: int, semaphore: asyncio.Semaphore):
async def dns_lookup(semaphore: asyncio.Semaphore, domain: str, dns_server: str, record_type: str, timeout: int):
    '''
    Perform a DNS lookup on a target domain.

@@ -26,115 +27,139 @@ async def dns_lookup(domain: str, subdomain: str, dns_server: str, dns_type: str
    :param semaphore: The semaphore to use for concurrency.
    '''
    async with semaphore:
        target = f'{subdomain}.{domain}'
        resolver = aiodns.DNSResolver(nameservers=[dns_server], timeout=timeout)
        logging.info(f'\033[96mKnocking {target}\033[0m on \033[93m{dns_server}\033[0m (\033[90m{dns_type}\033[0m)')

        logging.info(f'Knocking {dns_server} with {domain} ({record_type})')

        try:
            await resolver.query(target, dns_type)
        except Exception as e:
            pass
            await resolver.query(domain, record_type)
        except:
            pass # We're just knocking so errors are expected and ignored


def generate_subdomain(sub_domains: list) -> str:
def read_domain_file(file_path: str):
    '''
    Generate a random subdomain.
    Generator function to read domains line by line.

    :param sub_domains: The list of subdomains to use.
    :param file_path: The path to the file containing the domains.
    '''
    chosen_domains = random.sample(sub_domains, 2)
    if random.choice([True, False]):
        chosen_index = random.choice([0, 1])
        chosen_domains[chosen_index] = chosen_domains[chosen_index] + str(random.randint(1, 99))
    return random.choice(['.', '-']).join(chosen_domains)
    with open(file_path, 'r') as file:
        while True:
            for line in file:
                line = line.strip()
                if line:
                    yield line


async def main(args):
def read_dns_file(file_path: str):
    '''
    Main function for the program.
    Generator function to read DNS servers line by line.

    :param args: The arguments passed to the program.
    :param file_path: The path to the file containing the DNS servers.
    '''
    global dns_keys
    with open(file_path, 'r') as file:
        while True:
            for line in file:
                line = line.strip()
                if line:
                    yield line


def generate_subdomain(sub_domains: list, domain: str, max_size: int):
    '''
    Generator function to yield random noise subdomains.

    :param sub_domains: The list of subdomains to use for generating noise.
    '''
    while True:
        subs = random.sample(sub_domains, random.randint(2, max_size))

        if random.choice([True, False]):
            subs_index = random.randint(0, max_size - 1)
            subs[subs_index] = subs[subs_index] + str(random.randint(1, 99))

        yield random.choice(['.', '-']).join(subs) + '.' + domain


def setup_logging():
    '''Setup the logging for the program.'''

    os.makedirs('logs', exist_ok=True)

    sh = logging.StreamHandler()
    sh.setFormatter(logging.Formatter('%(asctime)s | %(levelname)9s | %(message)s', '%I:%M %p'))

    log_filename = time.strftime('pdk_%Y-%m-%d_%H-%M-%S.log')

    fh = logging.handlers.RotatingFileHandler(f'logs/{log_filename}', maxBytes=268435456, encoding='utf-8')
    fh.setFormatter(logging.Formatter('%(asctime)s | %(levelname)9s | %(message)s', '%Y-%m-%d %I:%M %p'))

    logging.basicConfig(level=logging.NOTSET, handlers=(sh,fh))


async def main():
    '''Main function for the program.'''

    parser = argparse.ArgumentParser(description='Passive DNS Knocking Tool')
    parser.add_argument('-d', '--domains', help='Comma-separated list of domains or file containing list of domains')
    parser.add_argument('-s', '--subdomains', help='File containing list of subdomains')
    parser.add_argument('-r', '--resolvers', help='File containing list of DNS resolvers')
    parser.add_argument('-rt', '--rectype', default='A,AAAA', help='Comma-separated list of DNS record types (default: A,AAAA)')
    parser.add_argument('-c', '--concurrency', type=int, default=25, help='Concurrency limit (default: 25)')
    parser.add_argument('-t', '--timeout', type=int, default=3, help='Timeout for DNS lookup (default: 3)')
    parser.add_argument('-n', '--noise', action='store_true', help='Enable random subdomain noise')
    args = parser.parse_args()

    setup_logging()

    args.rectype = [record_type.upper() for record_type in args.rectype.split(',')]

    if not args.domains:
        raise SystemExit('no domains specified')
    elif not os.path.exists(args.domains):
        raise FileNotFoundError('domains file not found')

    if not args.subdomains:
        raise SystemExit('no subdomains file specified')
    elif not os.path.exists(args.subdomains):
        raise FileNotFoundError('subdomains file not found')

    if not args.resolvers:
        raise SystemExit('no resolvers file specified')
    elif not os.path.exists(args.resolvers):
        raise FileNotFoundError('resolvers file not found')

    valid_record_types = ('A', 'AAAA', 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT')

    for record_type in args.rectype:
        if record_type not in valid_record_types:
            raise SystemExit(f'invalid record type: {record_type}')

    semaphore = asyncio.BoundedSemaphore(args.concurrency)
    tasks = []

    while True:
        for domain in args.domains.split(','):
            for dns_server in dns_keys:
        tasks = []

        for domain in read_domain_file(args.domains):

            for dns_server in read_dns_file(args.resolvers):
                sub_domain = generate_subdomain(args.subdomains, domain, 3)

                if len(tasks) < args.concurrency:
                    query_record = random.choice(args.rectype)
                    task = asyncio.create_task(dns_lookup(domain, dns_keys[dns_server], dns_server, query_record, args.timeout, semaphore))
                    query_record = random.choice(args.record_types)
                    task = asyncio.create_task(dns_lookup(semaphore, domain, sub_domain, dns_server, query_record, args.timeout))
                    tasks.append(task)

                else:
                    done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
                    tasks = list(pending)

        await asyncio.wait(tasks) # Wait for any remaining tasks to complete

        if not args.noise:
            break


if __name__ == '__main__':
    import argparse
    import os
    import urllib.request

    parser = argparse.ArgumentParser(description='Passive DNS Knocking Tool')
    parser.add_argument('-d', '--domains', help='Comma seperate list of domains or file containing list of domains')
    #parser.add_argument('-s', '--subdomain', help='Subdomain to look up')
    parser.add_argument('-c', '--concurrency', type=int, default=50, help='Concurrency limit (default: 50)')
    parser.add_argument('-r', '--resolvers', help='File containing list of DNS resolvers (uses public-dns.info if not specified)')
    parser.add_argument('-rt', '--rectype', default='A,AAAA', help='Comma-seperated list of DNS record type (default: A)')
    parser.add_argument('-t', '--timeout', type=int, default=3, help='Timeout for DNS lookup (default: 3)')
    parser.add_argument('-n', '--noise', action='store_true', help='Enable random subdomain noise')
    args = parser.parse_args()

    sh = logging.StreamHandler()
    sh.setFormatter(logging.Formatter('%(asctime)s | %(levelname)9s | %(message)s', '%I:%M %p'))
    os.makedirs('logs', exist_ok=True)
    log_filename = time.strftime('pdk_%Y-%m-%d_%H-%M-%S.log')
    fh = logging.handlers.RotatingFileHandler(f'logs/{log_filename}.log', maxBytes=2500000, backupCount=3, encoding='utf-8')
    fh.setFormatter(logging.Formatter('%(asctime)s | %(levelname)9s | %(filename)s.%(funcName)s.%(lineno)d | %(message)s', '%Y-%m-%d %I:%M %p'))
    logging.basicConfig(level=logging.NOTSET, handlers=(sh,fh))

    if not args.domains:
        raise SystemExit('no domains specified')

    if args.rectype:
        valid_record_types = ('A', 'AAAA', 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT')
        if ',' in args.rectype:
            args.rectype = args.rectype.split(',')
            for record_type in args.rectype:
                if record_type not in valid_record_types:
                    logging.fatal('invalid record type')
        elif args.rectype not in valid_record_types:
            logging.fatal('invalid record type')
        else:
            args.rectype = [args.rectype]

    if args.resolvers:
        if os.path.exists(args.resolvers):
            with open(args.resolvers, 'r') as file:
                dns_servers = [item.strip() for item in file.readlines() if item.strip()]
                logging.info(f'Loaded {len(dns_servers):,} DNS servers from file')
        else:
            logging.fatal('DNS servers file does not exist')
    else:
        dns_servers = urllib.request.urlopen('https://public-dns.info/nameservers.txt').read().decode().split('\n')
        logging.info(f'Loaded {len(dns_servers):,} DNS servers from public-dns.info')

    # Command line argument needed for this still
    if os.path.exists('random_subdomains.txt'):
        with open('random_subdomains.txt', 'r') as file:
            sub_domains = [item.strip() for item in file.readlines() if item.strip()]
            logging.info(f'Loaded {len(sub_domains):,} subdomains from file')
    else:
        logging.fatal('random_subdomains.txt is missing')

    dns_keys = dict()
    for dns_server in dns_servers:
        dns_keys[dns_server] = generate_subdomain(sub_domains)
    with open('dns_keys.txt', 'w') as file:
        json.dump(dns_keys, file)

    asyncio.run(main(args))
    asyncio.run(main())
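The new main loop above caps in-flight queries by topping a task list up to the `--concurrency` limit and draining it with `asyncio.wait(..., return_when=asyncio.FIRST_COMPLETED)`. A stripped-down, self-contained sketch of that pattern (placeholder coroutine and names, not the tool's actual lookup):

```python
import asyncio
import random

async def knock(target: str):
    '''Placeholder for a single DNS knock; sleeps instead of querying.'''
    await asyncio.sleep(random.random())
    print(f'knocked {target}')

async def run(targets, concurrency: int = 25):
    tasks = []
    for target in targets:
        if len(tasks) >= concurrency:
            # Wait for at least one in-flight knock to finish before adding more.
            done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
            tasks = list(pending)
        tasks.append(asyncio.create_task(knock(target)))
    if tasks:
        await asyncio.wait(tasks)  # Drain whatever is still in flight

asyncio.run(run([f'host{i}.example.org' for i in range(100)]))
```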