From 4cf3df7e2cb523a5f2fdf788be1a9587587c232f Mon Sep 17 00:00:00 2001
From: acidvegas
Date: Thu, 23 Nov 2023 04:09:36 -0500
Subject: [PATCH] Added color and README

---
 README.md    | 27 ++++++++++++++++++++++++++-
 ptrstream.py | 23 ++++++++++++++---------
 2 files changed, 40 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index 350b796..104ac37 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,28 @@
 # PTR Stream
 
-## More to come
+PTRStream is an asynchronous reverse DNS lookup tool written in Python. It generates random IP addresses and performs reverse DNS (PTR) lookups against a list of DNS servers.
+
+## Requirements
+- [python](https://www.python.org/)
+- [aiodns](https://pypi.org/project/aiodns/) *(pip install aiodns)*
+
+## Usage
+
+```bash
+python ptrstream.py [options]
+```
+
+| Argument              | Description                                                   |
+| --------------------- | ------------------------------------------------------------- |
+| `-c`, `--concurrency` | Control the speed of lookups. *(Default: 50)*                  |
+| `-t`, `--timeout`     | Timeout for DNS lookups. *(Default: 5)*                        |
+| `-r`, `--resolvers`   | File containing DNS servers to use for lookups. *(Optional)*   |
+
+## Now what?
+Results are cached and saved to a file named `ptr_{date}_{seed}.txt` after every 1000 successful lookups. Once a full loop through the IP space completes, a new seed is generated and the scan starts over.
+
+Coloring based on classification *(government, data center, etc.)* may be added.
+
+Output to Elasticsearch is also being considered.
+
+Still a work in progress...
\ No newline at end of file
diff --git a/ptrstream.py b/ptrstream.py
index d1d4e89..abf8639 100644
--- a/ptrstream.py
+++ b/ptrstream.py
@@ -60,13 +60,18 @@ async def main():
 	tasks = []
 	results_cache = []
 
+	if args.resolvers:
+		with open(args.resolvers) as file:
+			dns_servers = [server.strip() for server in file.readlines()]
+
 	while True:
-		dns_servers = []
-		while not dns_servers:
-			try:
-				dns_servers = get_dns_servers()
-			except:
-				time.sleep(300)
+		if not args.resolvers:
+			dns_servers = []
+			while not dns_servers:
+				try:
+					dns_servers = get_dns_servers()
+				except:
+					time.sleep(300)
 
 		seed = random.randint(10**9, 10**10 - 1)
 		ip_generator = rig(seed)
@@ -85,9 +90,8 @@
 			for exclude in ('undefined.hostname.localhost', 'localhost', '127.0.0.1'):
 				if result == exclude:
 					continue
-			if not result.endswith('.in-addr.arpa') and result != ('undefined.hostname.localhost') and result != ('localhost.'):
-				print(f'{ip.ljust(15)} -> {result}')
-				results_cache.append(f'{ip}:{result}')
+			print(f'\033[96m{ip.ljust(15)}\033[0m \033[90m->\033[0m \033[93m{result}\033[0m')
+			results_cache.append(f'{ip}:{result}')
 
 			if len(results_cache) >= 1000:
 				stamp = time.strftime('%Y%m%d')
@@ -101,6 +105,7 @@
 if __name__ == '__main__':
 	parser = argparse.ArgumentParser(description='Perform asynchronous reverse DNS lookups.')
 	parser.add_argument('-c', '--concurrency', type=int, default=50, help='Control the speed of lookups.')
 	parser.add_argument('-t', '--timeout', type=int, default=5, help='Timeout for DNS lookups.')
+	parser.add_argument('-r', '--resolvers', type=str, help='File containing DNS servers to use for lookups.')
 	args = parser.parse_args()
 	asyncio.run(main())
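
For context, a minimal sketch of the flow this patch wires together: reading a resolvers file the way the new `-r`/`--resolvers` option does, performing one reverse lookup with aiodns, and printing the result with the same ANSI colors as the new `print()` call. The `load_resolvers` and `ptr_lookup` helpers are illustrative assumptions rather than ptrstream's own code, and the lookup uses aiodns's `DNSResolver.gethostbyaddr()`, which may differ from what ptrstream actually calls.

```python
# Illustrative sketch only -- not ptrstream's actual lookup code.
import asyncio

import aiodns


def load_resolvers(path):
	"""Read one DNS server per line, mirroring the new args.resolvers handling."""
	with open(path) as file:
		return [server.strip() for server in file if server.strip()]


async def ptr_lookup(ip, nameservers, timeout=5):
	"""Resolve the PTR record for an IP, returning the hostname or None on failure."""
	resolver = aiodns.DNSResolver(nameservers=nameservers, timeout=timeout)
	try:
		answer = await resolver.gethostbyaddr(ip)
		return answer.name
	except aiodns.error.DNSError:
		return None


async def demo():
	nameservers = ['8.8.8.8', '1.1.1.1']  # or load_resolvers('resolvers.txt')
	ip = '8.8.8.8'
	result = await ptr_lookup(ip, nameservers)
	if result:
		# Same color scheme as the patched print(): cyan IP, grey arrow, yellow hostname
		print(f'\033[96m{ip.ljust(15)}\033[0m \033[90m->\033[0m \033[93m{result}\033[0m')


if __name__ == '__main__':
	asyncio.run(demo())
```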