#!/usr/bin/env python
# Elasticsearch Recon Ingestion Scripts (ERIS) - Developed by Acidvegas (https://git.acid.vegas/eris)
# ingest_masscan.py

import json
import logging
import time

try:
	import aiofiles
except ImportError:
	raise ImportError('Missing required \'aiofiles\' library. (pip install aiofiles)')


default_index = 'masscan-logs'


def construct_map() -> dict:
	'''Construct the Elasticsearch index mapping for Masscan records.'''

	keyword_mapping = { 'type': 'text', 'fields': { 'keyword': { 'type': 'keyword', 'ignore_above': 256 } } }

	geoip_mapping = {
		'city_name'        : keyword_mapping,
		'continent_name'   : keyword_mapping,
		'country_iso_code' : keyword_mapping,
		'country_name'     : keyword_mapping,
		'location'         : { 'type': 'geo_point' },
		'region_iso_code'  : keyword_mapping,
		'region_name'      : keyword_mapping
	}
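
	# The commented 'geoip' property in the mapping below pairs with an Elasticsearch
	# geoip ingest pipeline. A hedged sketch of such a pipeline (the pipeline name and
	# field choices here are illustrative, not part of ERIS):
	#
	#   PUT _ingest/pipeline/geoip-masscan
	#   { "processors": [ { "geoip": { "field": "ip", "target_field": "geoip" } } ] }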

	mapping = {
		'mappings': {
			'properties': {
				'ip'   : { 'type': 'ip' },
				'port' : { 'type': 'integer' },
				'data' : {
					'properties': {
						'proto'   : { 'type': 'keyword' },
						'service' : { 'type': 'keyword' },
						'banner'  : keyword_mapping,
						'seen'    : { 'type': 'date' }
					}
				},
				#'geoip'    : { 'properties': geoip_mapping } # Used with the geoip pipeline to enrich the data
				'last_seen' : { 'type': 'date' }
			}
		}
	}

	return mapping
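
# A minimal sketch of how this mapping might be applied when creating the index,
# assuming the official `elasticsearch` client (pip install elasticsearch); the host
# is illustrative, and ERIS normally applies the mapping through its own ingestion core:
#
#   from elasticsearch import Elasticsearch
#   client = Elasticsearch('https://localhost:9200')
#   client.indices.create(index=default_index, mappings=construct_map()['mappings'])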


async def process_data(file_path: str):
	'''
	Read and process Masscan records from the log file.

	:param file_path: Path to the Masscan log file
	'''

	async with aiofiles.open(file_path) as input_file:
		async for line in input_file:
			line = line.strip()

			if line == '~eof': # Sentinel value indicating the end of a process (used with --watch on a FIFO)
				break

			if not line or not line.startswith('{'):
				continue
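
			# The '~eof' sentinel above allows feeding this script from a FIFO; a hedged
			# shell sketch (paths are hypothetical):
			#
			#   mkfifo /tmp/masscan.fifo
			#   masscan ... -oJ /tmp/masscan.fifo &
			#   echo '~eof' > /tmp/masscan.fifo   # signal the watcher to stop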

			if line.endswith(','): # Masscan's JSON output is a JSON array, so each record line carries a trailing comma; strip it before parsing
				line = line[:-1]

			try:
				record = json.loads(line)
			except json.decoder.JSONDecodeError:
				# In rare cases, the JSON record may be incomplete or malformed:
				# { "ip": "51.161.12.223", "timestamp": "1707628302", "ports": [ {"port": 22, "proto": "tcp", "service": {"name": "ssh", "banner":
				# { "ip": "83.66.211.246", "timestamp": "1706557002"
				logging.error(f'Failed to parse JSON record! ({line})')
				input('Press Enter to continue...') # Pause for review & debugging (remove this in production)
				continue

			if len(record['ports']) > 1:
				# In rare cases, a single record may contain multiple ports, though I have yet to witness this...
				logging.warning(f'Multiple ports found for record! ({record})')
				input('Press Enter to continue...') # Pause for review (remove this in production)

			for port_info in record['ports']:
				struct = { # Nest the fields to match the index mapping: port top-level, proto/service/banner/seen under 'data'
					'ip'   : record['ip'],
					'port' : port_info['port'],
					'data' : {
						'proto' : port_info['proto'],
						'seen'  : time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(int(record['timestamp'])))
					},
					'last_seen' : time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(int(record['timestamp'])))
				}

				if 'service' in port_info:
					if 'name' in port_info['service']:
						if (service_name := port_info['service']['name']) not in ('unknown', ''):
							struct['data']['service'] = service_name

					if 'banner' in port_info['service']:
						banner = ' '.join(port_info['service']['banner'].split()) # Remove extra whitespace
						if banner:
							struct['data']['banner'] = banner

				doc_id = f'{record["ip"]}:{port_info["port"]}' # Store with ip:port as the unique id to allow the record to be reindexed if it exists.

				yield {'_id': doc_id, '_index': default_index, '_source': struct}
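
# A hedged sketch of feeding these documents into Elasticsearch in bulk, assuming
# elasticsearch[async] is installed; the host is illustrative, and ERIS normally
# drives ingestion through its own core rather than this helper:
#
#   from elasticsearch import AsyncElasticsearch
#   from elasticsearch.helpers import async_bulk
#
#   async def ingest(file_path: str):
#       client = AsyncElasticsearch('https://localhost:9200')
#       await async_bulk(client, process_data(file_path))
#       await client.close()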


async def test(input_path: str):
	'''
	Test the Masscan ingestion process

	:param input_path: Path to the Masscan log file
	'''

	async for document in process_data(input_path):
		print(document)


if __name__ == '__main__':
	import argparse
	import asyncio

	parser = argparse.ArgumentParser(description='Masscan Ingestor for ERIS')
	parser.add_argument('input_path', help='Path to the input file or directory')
	args = parser.parse_args()

	asyncio.run(test(args.input_path))
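
	# Usage example (the file name is illustrative):
	#   python ingest_masscan.py output.json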


'''
Deploy:
	apt-get install iptables masscan libpcap-dev screen
	setcap 'CAP_NET_RAW+eip CAP_NET_ADMIN+eip' /bin/masscan
	/sbin/iptables -A INPUT -p tcp --dport 61010 -j DROP # Not persistent; stops the kernel from sending RSTs on our source port
	printf "0.0.0.0/8\n10.0.0.0/8\n100.64.0.0/10\n127.0.0.0/8\n169.254.0.0/16\n172.16.0.0/12\n192.0.0.0/24\n192.0.2.0/24\n192.31.196.0/24\n192.52.193.0/24\n192.88.99.0/24\n192.168.0.0/16\n192.175.48.0/24\n198.18.0.0/15\n198.51.100.0/24\n203.0.113.0/24\n224.0.0.0/3\n255.255.255.255/32" > exclude.conf
	screen -S scan
	masscan 0.0.0.0/0 -p21,22,23 --banners --http-user-agent "USER_AGENT" --source-port 61010 --open-only --rate 30000 --excludefile exclude.conf -oJ output.json
	masscan 0.0.0.0/0 -p21,22,23 --banners --http-user-agent "USER_AGENT" --source-port 61000-65503 --open-only --rate 30000 --excludefile exclude.conf -oJ output_new.json --shard $i/$TOTAL

Output:
	{
		"ip"        : "43.134.51.142",
		"timestamp" : "1705255468",
		"ports"     : [
			{
				"port"    : 22, # We will create a record for each open port
				"proto"   : "tcp",
				"service" : {
					"name"   : "ssh",
					"banner" : "SSH-2.0-OpenSSH_8.9p1 Ubuntu-3ubuntu0.4"
				}
			}
		]
	}

Input:
	{
		"_id"     : "43.134.51.142:22",
		"_index"  : "masscan-logs",
		"_source" : {
			"ip"   : "43.134.51.142",
			"port" : 22,
			"data" : {
				"proto"   : "tcp",
				"service" : "ssh",
				"banner"  : "SSH-2.0-OpenSSH_8.9p1 Ubuntu-3ubuntu0.4",
				"seen"    : "2024-01-14T18:04:28Z"
			},
			"last_seen" : "2024-01-14T18:04:28Z"
		}
	}
'''