Compare commits


10 Commits

4 changed files with 86 additions and 55 deletions


@@ -16,9 +16,20 @@ It is expected to set *realistic* expectations when using this tool. In contempo
| `-t`, `--timeout` | DNS timeout *(default: 30)* |
## Information
I only wrote this to shit on **[this bozo](https://github.com/flotwig/TLDR-2/tree/main)** who took a dead project & brought it back to life by making it even worse. Rather than making a pull request to give this bloke more credit in his "tenure" as a developer, I decided to just rewrite it all from scratch so people can fork off of *clean* code instead.
I only wrote this to shit on **[this bozo](https://github.com/flotwig/TLDR-2/)** who took a dead project & brought it back to life by making it even worse. Rather than making a pull request to give this bloke more credit in his "tenure" as a developer, I decided to just rewrite it all from scratch so people can fork off of *clean* code instead.
This repository also contains a [pure POSIX version](./mdaxfr) for portability, as well as a [script](./opennic) to do zone transfers on [OpenNIC TLDs](https://wiki.opennic.org/opennic/dot).
This repository also contains a [pure POSIX version](./mdaxfr) for portability, as well as a [script](./opennic) to do zone transfers on [OpenNIC TLDs](https://wiki.opennic.org/opennic/dot). Also included is a special [ozones](./ozones) script for fetching a few obscure zones in a non-conventional manner.
## Statistics, laughs, & further thinking...
As of my last scan in 2023, I was only able to AXFR the zones for 8 out of 1,456 root TLDs, a few of which were zones already retrieved by [acidvegas/czds](https://github.com/acidvegas/czds/), along with over 100 TLDs in the [Public suffix list](https://publicsuffix.org/). The additional scripts in this repository provide zone files collected through unconventional means.
For laughs, here is a one-liner mass zone AXFR:
```bash
curl -s https://www.internic.net/domain/root.zone | awk '$4=="A" || $4=="AAAA" {print substr($1, 3) " " $5}' | sed 's/\.$//' | xargs -n2 sh -c 'dig AXFR "$0" "@$1"'
```
**Note:** Don't actually use this lol...
Interestingly, this has worked on some darknet DNS servers... it may be worth exploring the collection of more zones for alternative DNS routing, though some of that goes beyond an "AXFR"...
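If you want to poke at a single TLD instead of blasting everything from the root zone, a minimal sketch along the same lines (not part of this repo, purely illustrative) would be:
```bash
#!/bin/sh
# Illustration only: try an AXFR for one TLD against each of its nameservers.
tld="${1:?usage: $0 <tld>}"
for ns in $(dig +short "$tld" NS | sed 's/\.$//'); do
	echo "Trying $tld @ $ns"
	dig AXFR "$tld" "@$ns" +noall +answer
done
```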
___

mdaxfr

@@ -7,37 +7,49 @@ mkdir -p "$OUTPUT_DIR/root"
mkdir -p "$OUTPUT_DIR/psl"
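# Resolve both the IPv6 (AAAA) and IPv4 (A) addresses of a nameserver, printed one per line.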
resolve_nameserver() {
dig +short "$1" A || dig +short "$1" AAAA
dig +short AAAA $1 +short -t A $1 2>/dev/null
}
attempt_axfr() {
tld=$1
nameserver=$2
filename="$3"
temp_file="${filename}.temp"
nameserver_ip=$(resolve_nameserver "$nameserver")
if [ -z "$nameserver_ip" ]; then
echo "Failed to resolve nameserver $nameserver"
return
fi
nameserver_ips=$(resolve_nameserver "$nameserver")
if [ -z "$nameserver_ips" ]; then
echo -e "[\e[31mFAIL\e[0m] AXFR for \e[36m$tld\e[0m on \e[33m$nameserver\e[0m \e[90m(failed to resolve nameserver)\e[0m"
return
fi
dig AXFR "$tld" "@$nameserver_ip" > "$temp_file"
if [ $? -eq 0 ]; then
mv "$temp_file" "$filename"
else
echo "Failed to perform zone transfer from $nameserver for $tld"
rm -f "$temp_file"
fi
for nameserver_ip in $nameserver_ips; do
dig AXFR "$tld" "@$nameserver_ip" > "$temp_file"
if ! grep -q 'IN.*NS' "$temp_file"; then
echo -e "[\e[31mFAIL\e[0m] AXFR for \e[36m$tld\e[0m on \e[33m$nameserver\e[0m \e[90m($nameserver_ip)\e[0m"
rm -f "$temp_file"
else
mv "$temp_file" "$filename"
echo -e "[\e[32mSUCCESS\e[0m] AXFR for \e[36m$tld\e[0m on \e[33m$nameserver\e[0m \e[90m($nameserver_ip)\e[0m"
return
fi
done
}
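# Usage sketch (illustrative): attempt_axfr <zone> <nameserver> <output file>
#   e.g. attempt_axfr "." "a.root-servers.net" "$OUTPUT_DIR/root/a.root-servers.net.txt"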
echo "[\e[31mWARNING\e[0m] Most nameservers will block AXFR requests \e[90m(It is normal for most of these to fail)\e[0m"
sleep 3
# For root IP space zones
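# Grab the delegation (NS) records for every /8 reverse zone, 0.in-addr.arpa through 255.in-addr.arpa.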
for i in $(seq 0 255); do
dig +nocmd +noall +answer +multiline "$i.in-addr.arpa" NS >> "$OUTPUT_DIR/root/in-addr.arpa.txt"
done
# For root nameservers
for root in $(dig +short . NS); do
for root in $(dig +short . NS | sed 's/\.$//'); do
attempt_axfr "." "$root" "$OUTPUT_DIR/root/$root.txt"
done
# Parse the tld list from a root nameserver
rndroot=$(find $OUTPUT/root/*.root-servers.net.txt -type f | shuf -n 1)
rndroot=$(find $OUTPUT_DIR/root/*.root-servers.net.txt -type f | shuf -n 1)
if [ -z "$rndroot" ]; then
echo "Failed to AXFR a root nameserver (using IANA list instead)"
tlds=$(curl -s 'https://data.iana.org/TLD/tlds-alpha-by-domain.txt' | tail -n +2 | tr '[:upper:]' '[:lower:]')
@@ -47,14 +59,14 @@ fi
# For TLD nameservers
for tld in $tlds; do
for ns in $(dig +short "$tld" NS); do
for ns in $(dig +short "$tld" NS | sed 's/\.$//'); do
attempt_axfr "$tld" "$ns" "$OUTPUT_DIR/$tld.txt"
done
done
# For Public Suffix List TLD nameservers
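# The pipeline below drops comment lines (//), wildcard (*) and exception (!) rules, then keeps only multi-label suffixes.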
for tld in $(curl -s https://publicsuffix.org/list/public_suffix_list.dat | grep -vE '^(//|.*[*!])' | grep '\.' | awk '{print $1}'); do
for ns in $(dig +short "$tld" NS); do
for ns in $(dig +short "$tld" NS | sed 's/\.$//'); do
attempt_axfr "$tld" "$ns" "$OUTPUT_DIR/psl/$tld.txt"
done
done


@@ -29,23 +29,26 @@ def attempt_axfr(tld: str, nameserver: str, filename: str):
else:
for ns in nameserver: # Let's try all the IP addresses for the nameserver
try:
with open(temp_file, 'w') as file:
xfr = dns.query.xfr(nameserver.address, tld+'.', lifetime=300)
for msg in xfr:
for rrset in msg.answer:
for rdata in rrset:
file.write(f'{rrset.name}.{tld} {rrset.ttl} {rdata}\n')
os.rename(temp_file, filename)
xfr = dns.query.xfr(ns, tld+'.', lifetime=300)
if xfr:
with open(temp_file, 'w') as file:
for msg in xfr:
for rrset in msg.answer:
for rdata in rrset:
file.write(f'{rrset.name}.{tld} {rrset.ttl} {rdata}\n')
os.rename(temp_file, filename)
break
except Exception as ex:
# Most zone transfers are blocked, so we don't want to log them
#logging.error(f'Failed to perform zone transfer from {nameserver} ({ns}) for {tld}: {ex}')
if os.path.exists(temp_file):
os.remove(temp_file)
logging.error(f'Failed to perform zone transfer from {nameserver.address} for {tld}: {ex}')
def get_nameservers(target: str) -> list:
'''
Generate a list of the root nameservers.
:param target: The target domain to get the nameservers for.
'''
try:
@@ -59,13 +62,19 @@ def get_nameservers(target: str) -> list:
return []
def get_root_tlds() -> list:
'''Get the root TLDs from a root nameserver.'''
rndroot = [root for root in os.listdir('root') if root.endswith('.root-servers.net.txt')][0]
def get_root_tlds(output_dir: str) -> list:
'''
Get the root TLDs from a root nameserver.
:param output_dir: The output directory to use.
'''
root_dir = os.path.join(output_dir, 'root')
rndroot = [root for root in os.listdir(root_dir) if root.endswith('.root-servers.net.txt')]
if rndroot:
tlds = sorted(set([item.split()[0][:-1] for item in open(rndroot).read().split('\n') if item and 'IN' in item and 'NS' in item]))
rndroot_file = rndroot[0] # Take the first file from the list
tlds = sorted(set([item.split()[0][:-1] for item in open(os.path.join(root_dir, rndroot_file)).read().split('\n') if item and 'IN' in item and 'NS' in item]))
else:
logging.warning('Failed to find root nameserver list, using IANA list')
logging.warning('Failed to find root nameserver list... falling back to the IANA list')
tlds = urllib.request.urlopen('https://data.iana.org/TLD/tlds-alpha-by-domain.txt').read().decode('utf-8').lower().split('\n')[1:]
random.shuffle(tlds)
return tlds
@@ -109,23 +118,26 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Mass DNS AXFR')
parser.add_argument('-c', '--concurrency', type=int, default=30, help='maximum concurrent tasks')
parser.add_argument('-o', '--output', default='axfrout', help='output directory')
parser.add_argument('-t', '--timeout', type=int, default=30, help='DNS timeout (default: 30)')
parser.add_argument('-t', '--timeout', type=int, default=15, help='DNS timeout (default: 15)')
args = parser.parse_args()
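# Example invocation (illustrative only; the script filename is an assumption, the flags match the argparse options above):
#   python3 mdaxfr.py -c 30 -o axfrout -t 15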
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
os.makedirs(args.output, exist_ok=True)
dns.resolver._DEFAULT_TIMEOUT = args.timeout
# Grab the root nameservers
os.makedirs(os.path.join(args.output, 'root'), exist_ok=True)
logging.info('Fetching root nameservers...')
root_dir = os.path.join(args.output, 'root')
os.makedirs(root_dir, exist_ok=True)
with concurrent.futures.ThreadPoolExecutor(max_workers=args.concurrency) as executor:
futures = [executor.submit(attempt_axfr, '', root, os.path.join(args.output, f'root/{root}.txt')) for root in get_nameservers('')]
futures = [executor.submit(attempt_axfr, tld, ns, os.path.join(args.output, tld + '.txt')) for tld in get_root_tlds(root_dir) for ns in get_nameservers(tld) if ns]
for future in concurrent.futures.as_completed(futures):
try:
future.result()
except Exception as e:
logging.error(f'Error in root server task: {e}')
logging.error(f'Error in TLD task: {e}')
# Get the root TLDs
logging.info('Fetching root TLDs...')
with concurrent.futures.ThreadPoolExecutor(max_workers=args.concurrency) as executor:
futures = [executor.submit(attempt_axfr, tld, ns, os.path.join(args.output, tld + '.txt')) for tld in get_root_tlds() for ns in get_nameservers(tld) if ns]
for future in concurrent.futures.as_completed(futures):
@@ -134,7 +146,7 @@ if __name__ == '__main__':
except Exception as e:
logging.error(f'Error in TLD task: {e}')
# Get the Public Suffix List
logging.info('Fetching PSL TLDs...')
os.makedirs(os.path.join(args.output, 'psl'), exist_ok=True)
with concurrent.futures.ThreadPoolExecutor(max_workers=args.concurrency) as executor:
futures = [executor.submit(attempt_axfr, tld, ns, os.path.join(args.output, f'psl/{tld}.txt')) for tld in get_psl_tlds() for ns in get_nameservers(tld) if ns]

ozones

@@ -1,31 +1,27 @@
#!/bin/sh
# Mass DNS AXFR (other zones) - developed by acidvegas (https://git.acid.vegas/mdaxfr)
```bash
curl -s https://www.internic.net/domain/root.zone | awk '$4=="NS" {gsub(/\.$/, "", $NF); print $NF}'
curl -s https://www.internic.net/domain/root.zone | awk '$4=="A" || $4=="AAAA" {print $5}'
```
```bash
# https://portal.switch.ch/pub/open-data/#tab-fccd70a3-b98e-11ed-9a74-5254009dc73c-3
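# The -y hmac-sha512:<keyname>:<secret> option below authenticates the AXFR with the public TSIG keys that SWITCH publishes for its open ch and li zone data.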
dig @zonedata.switch.ch ch. AXFR -y hmac-sha512:tsig-zonedata-ch-public-21-01:stZwEGApYumtXkh73qMLPqfbIDozWKZLkqRvcjKSpRnsor6A6MxixRL6C2HeSVBQNfMW4wer+qjS0ZSfiWiJ3Q== > ch.txt
dig @zonedata.switch.ch li. AXFR -y hmac-sha512:tsig-zonedata-li-public-21-01:t8GgeCn+fhPaj+cRy1epox2Vj4hZ45ax6v3rQCkkfIQNg5fsxuU23QM5mzz+BxJ4kgF/jiQyBDBvL+XWPE6oCQ== > li.txt
dig @zonedata.iis.se se AXFR > se.txt
dig @zonedata.iis.se nu AXFR > nu.txt
dig @zone.internet.ee ee. AXFR > ee.txt
dig @ns1.gov.ps xn--ygbi2ammx. AXFR > xn--ygbi2ammx.txt
wget -O sk.txt https://sk-nic.sk/subory/domains.txt
wget -O gov.txt https://raw.githubusercontent.com/cisagov/dotgov-data/main/gov.txt
wget -O nc.txt 'https://www.domaine.nc/whos?who=A*'
# https://www.afnic.fr/produits-services/services-associes/donnees-partagees/
# not sure about this one....
curl -s -H 'Accept: application/json' 'https://odata.domain.fi/OpenDomainData.svc/Domains?$inlinecount=allpages'
curl -s -H 'Accept: application/json' 'https://odata.domain.fi/OpenDomainData.svc/Domains?$inlinecount=allpages' # not sure about this one....
wget -O dn42.txt http://ix.ucis.nl/dn42/dnszone2.php?
```
wget -O dn42.txt http://ix.ucis.nl/dn42/dnszone2.php? # Darknet
curl -s https://www.internic.net/domain/root.zone | awk '$4=="A" || $4=="AAAA" {print $5}'