Compare commits

6 Commits

| Author | SHA1 | Date |
|---|---|---|
| | `e8cc4e2ddb` | |
| | `2a7b271d4f` | |
| | `ff89f14a85` | |
| | `e9795a0177` | |
| | `551df89f3f` | |
| | `9e76ed0684` | |
#### README.md

@@ -39,8 +39,6 @@ czds [-h] [-u USERNAME] [-p PASSWORD] [-z] [-c CONCURRENCY] [-d] [-k] [-r] [-s]
 ###### Zone Options
 | `-z`, `--zones` | Download zone files | |
 | `-c`, `--concurrency` | Number of concurrent downloads | `3` |
 | `-d`, `--decompress` | Decompress zone files after download | |
 | `-k`, `--keep` | Keep original gzip files after decompression | |

 ###### Report Options
 | `-r`, `--report` | Download the zone stats report | |
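For reference, a typical invocation combining these flags might look like `czds -u USERNAME -p PASSWORD -z -c 5 -d`, which would download the zone files five at a time and decompress each one after download (the concurrency value here is illustrative).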
#### czds/__init__.py

@@ -4,8 +4,7 @@
 from .client import CZDS


-__version__ = '1.3.3'
+__version__ = '1.3.8'
 __author__ = 'acidvegas'
 __email__ = 'acid.vegas@acid.vegas'
 __github__ = 'https://github.com/acidvegas/czds'
#### czds/__main__.py

@@ -10,11 +10,6 @@ import os

 from .client import CZDS

-from dotenv import load_dotenv
-
-
-load_dotenv()
-

 async def main():
 	'''Entry point for the command line interface'''
#### czds/client.py

@@ -6,6 +6,8 @@ import asyncio
 import json
 import logging
 import os
+import csv
+import io

 try:
 	import aiohttp
@@ -44,8 +46,21 @@ class CZDS:
 		self.username = username
 		self.password = password

-		# Set the session with longer timeouts
-		self.session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=None, connect=60, sock_connect=60, sock_read=60))
+		# Configure TCP keepalive
+		connector = aiohttp.TCPConnector(
+			keepalive_timeout=300,      # Keep connections alive for 5 minutes
+			force_close=False,          # Don't force close connections
+			enable_cleanup_closed=True, # Cleanup closed connections
+			ttl_dns_cache=300,          # Cache DNS results for 5 minutes
+		)
+
+		# Set the session with longer timeouts and keepalive
+		self.session = aiohttp.ClientSession(
+			connector=connector,
+			timeout=aiohttp.ClientTimeout(total=None, connect=60, sock_connect=60, sock_read=None),
+			headers={'Connection': 'keep-alive'},
+			raise_for_status=True
+		)

 		# Placeholder for the headers after authentication
 		self.headers = None
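Zone transfers can hold a single HTTP connection open for a long time, and the connector settings above exist to keep idle connections from being recycled mid-download. Below is a standalone sketch of the same session setup using only documented aiohttp options; the function name is ours, not the library's:

```python
import aiohttp


async def make_download_session() -> aiohttp.ClientSession:
	'''Build an aiohttp session tuned for long-running downloads (illustrative sketch).'''

	# Reuse TCP connections across requests instead of reconnecting each time
	connector = aiohttp.TCPConnector(
		keepalive_timeout=300,      # keep idle connections open for 5 minutes
		force_close=False,          # allow connection reuse
		enable_cleanup_closed=True, # reap half-closed transports
		ttl_dns_cache=300           # cache DNS lookups for 5 minutes
	)

	# total=None removes the overall deadline; sock_read=None tolerates slow reads
	timeout = aiohttp.ClientTimeout(total=None, connect=60, sock_connect=60, sock_read=None)

	return aiohttp.ClientSession(
		connector=connector,
		timeout=timeout,
		headers={'Connection': 'keep-alive'},
		raise_for_status=True
	)
```

Note that with `raise_for_status=True`, any non-2xx response raises `aiohttp.ClientResponseError` automatically, which makes explicit status checks after each request optional.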
@@ -125,15 +140,12 @@ class CZDS:
 		Downloads the zone report stats from the API and scrubs the report for privacy

 		:param filepath: Filepath to save the scrubbed report
 		:param scrub: Whether to scrub the username from the report
 		:param format: Output format ('csv' or 'json')
 		'''

 		logging.info('Downloading zone stats report')

 		# Send the request to the API
 		async with self.session.get('https://czds-api.icann.org/czds/requests/report', headers=self.headers) as response:
-			# Check if the request was successful
-			if response.status != 200:
-				raise Exception(f'Failed to download the zone stats report: {response.status} {await response.text()}')
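In isolation, the report download is a single authenticated GET against the endpoint above. A minimal sketch, assuming the usual CZDS bearer-token header shape (the `token` argument and function name are hypothetical):

```python
import aiohttp


async def fetch_zone_report(token: str) -> str:
	'''Fetch the raw CSV zone stats report (sketch; authentication not shown).'''

	headers = {'Authorization': f'Bearer {token}'}  # assumed header shape

	async with aiohttp.ClientSession() as session:
		async with session.get('https://czds-api.icann.org/czds/requests/report', headers=headers) as response:
			if response.status != 200:
				raise Exception(f'Failed to download the zone stats report: {response.status}')
			return await response.text()
```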
@@ -144,9 +156,21 @@ class CZDS:
 			content = content.replace(self.username, 'nobody@no.name')
 			logging.debug('Scrubbed username from report')

-		# Convert the report to JSON format if requested (default is CSV)
+		# Convert the report to JSON format if requested
 		if format.lower() == 'json':
-			content = json.dumps(content, indent=4)
+			# Parse CSV content
+			csv_reader = csv.DictReader(io.StringIO(content))
+
+			# Convert to list of dicts with formatted keys
+			json_data = []
+			for row in csv_reader:
+				formatted_row = {
+					key.lower().replace(' ', '_'): value
+					for key, value in row.items()
+				}
+				json_data.append(formatted_row)
+
+			content = json.dumps(json_data, indent=4)
 			logging.debug('Converted report to JSON format')

 		# Save the report to a file if a filepath is provided
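The new JSON path is pure standard library: parse the CSV with `csv.DictReader`, snake_case the header names, and serialize the rows. The same transformation as a self-contained function (the function name is ours):

```python
import csv
import io
import json


def csv_report_to_json(content: str) -> str:
	'''Convert CSV report text into a JSON array with snake_case keys.'''

	# DictReader maps each data row to a dict keyed by the CSV header row
	reader = csv.DictReader(io.StringIO(content))

	# Lowercase the header names and replace spaces with underscores
	rows = [
		{key.lower().replace(' ', '_'): value for key, value in row.items()}
		for row in reader
	]

	return json.dumps(rows, indent=4)
```

For example, a report whose header row begins `TLD,Status` produces objects keyed `tld` and `status`.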
@@ -169,17 +193,24 @@ class CZDS:

 		async def _download():
 			tld_name = url.split('/')[-1].split('.')[0] # Extract TLD from URL
-			max_retries = 10 # Maximum number of retries for failed downloads
+			max_retries = 20 # Maximum number of retries for failed downloads
 			retry_delay = 5  # Delay between retries in seconds
-			timeout = aiohttp.ClientTimeout(total=None, connect=60, sock_connect=60, sock_read=None)

+			# Headers for better connection stability
+			download_headers = {
+				**self.headers,
+				'Connection': 'keep-alive',
+				'Keep-Alive': 'timeout=600', # 10 minutes
+				'Accept-Encoding': 'gzip'
+			}
+
 			# Start the attempt loop
 			for attempt in range(max_retries):
 				try:
 					logging.info(f'Starting download of {tld_name} zone file{" (attempt " + str(attempt + 1) + ")" if attempt > 0 else ""}')

 					# Send the request to the API
-					async with self.session.get(url, headers=self.headers, timeout=timeout) as response:
+					async with self.session.get(url, headers=download_headers) as response:
 						# Check if the request was successful
 						if response.status != 200:
 							logging.error(f'Failed to download {tld_name}: {response.status} {await response.text()}')
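The surrounding control flow is a fixed-delay retry loop: try the request, log and sleep on a transient failure, and give up after `max_retries` attempts. A stripped-down sketch of just that pattern (the streaming-to-disk body of the real method is elided, and the function name is ours):

```python
import asyncio
import logging

import aiohttp


async def get_with_retries(session: aiohttp.ClientSession, url: str,
                           max_retries: int = 20, retry_delay: int = 5) -> bytes:
	'''Fixed-delay retry wrapper around a GET request (illustrative sketch).'''

	for attempt in range(max_retries):
		try:
			async with session.get(url) as response:
				if response.status != 200:
					raise aiohttp.ClientResponseError(
						response.request_info, response.history, status=response.status)
				return await response.read()
		except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
			logging.warning(f'Attempt {attempt + 1}/{max_retries} failed for {url}: {exc}')
			# Re-raise on the final attempt, otherwise wait and retry
			if attempt == max_retries - 1:
				raise
			await asyncio.sleep(retry_delay)
```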
#### czds/utils.py

@@ -2,6 +2,7 @@
 # ICANN API for the Centralized Zones Data Service - developed by acidvegas (https://git.acid.vegas/czds)
 # czds/utils.py

+import asyncio
 import gzip
 import logging
 import os
@@ -24,43 +25,30 @@ async def gzip_decompress(filepath: str, cleanup: bool = True):
 	:param filepath: Path to the gzip file
 	:param cleanup: Whether to remove the original gzip file after decompression
 	'''

 	# Get the original size of the file
 	original_size = os.path.getsize(filepath)
+	output_path = filepath[:-3]

 	logging.debug(f'Decompressing {filepath} ({humanize_bytes(original_size)})...')

-	# Remove the .gz extension
-	output_path = filepath[:-3]
-
-	# Set the chunk size to 25MB
-	chunk_size = 25 * 1024 * 1024
+	# Use a large chunk size (256MB) for maximum throughput
+	chunk_size = 256 * 1024 * 1024

-	# Create progress bar for decompression
+	# Run the actual decompression in a thread pool to prevent blocking
 	with tqdm(total=original_size, unit='B', unit_scale=True, desc=f'Decompressing {os.path.basename(filepath)}', leave=False) as pbar:
-		# Decompress the file
-		with gzip.open(filepath, 'rb') as gz:
-			async with aiofiles.open(output_path, 'wb') as f_out:
+		async with aiofiles.open(output_path, 'wb') as f_out:
+			# Run gzip decompression in thread pool since it's CPU-bound
+			loop = asyncio.get_event_loop()
+			with gzip.open(filepath, 'rb') as gz:
 				while True:
-					# Read the next chunk
-					chunk = gz.read(chunk_size)
-
-					# If the chunk is empty, break
+					chunk = await loop.run_in_executor(None, gz.read, chunk_size)
 					if not chunk:
 						break
-
-					# Write the chunk to the output file
 					await f_out.write(chunk)
-
-					# Update the progress bar
 					pbar.update(len(chunk))

 	# Get the decompressed size of the file
 	decompressed_size = os.path.getsize(output_path)

 	logging.debug(f'Decompressed {filepath} ({humanize_bytes(decompressed_size)})')

 	# If the cleanup flag is set, remove the original gzip file
 	if cleanup:
 		os.remove(filepath)
 		logging.debug(f'Removed original gzip file: {filepath}')
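The core idea of the rewritten decompressor is that gzip inflation is CPU-bound, so each chunk read is handed to the default thread pool via `run_in_executor` while the event loop stays free to service other downloads. A minimal sketch of the pattern without the progress bar (the function name and default chunk size are illustrative):

```python
import asyncio
import gzip

import aiofiles


async def decompress_gz(filepath: str, chunk_size: int = 256 * 1024 * 1024) -> str:
	'''Decompress file.gz to file without blocking the event loop (sketch).'''

	output_path = filepath[:-3]  # strip the .gz extension
	loop = asyncio.get_event_loop()

	async with aiofiles.open(output_path, 'wb') as f_out:
		with gzip.open(filepath, 'rb') as gz:
			while True:
				# gz.read is CPU-bound, so run it in the default executor
				chunk = await loop.run_in_executor(None, gz.read, chunk_size)
				if not chunk:
					break
				await f_out.write(chunk)

	return output_path
```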