eris - Elasticsearch Recon Ingestion Scripts (ERIS) 🔎
git clone git://git.acid.vegas/eris.git
ingest_masscan.py (4752B)
#!/usr/bin/env python
# Elasticsearch Recon Ingestion Scripts (ERIS) - Developed by Acidvegas (https://git.acid.vegas/eris)
# ingest_masscan.py

import json
import logging
import time

try:
    import aiofiles
except ImportError:
    raise ImportError('Missing required \'aiofiles\' library. (pip install aiofiles)')


# Set a default elasticsearch index if one is not provided
default_index = 'eris-masscan'


def construct_map() -> dict:
    '''Construct the Elasticsearch index mapping for Masscan records.'''

    # Match on exact value or full text search
    keyword_mapping = { 'type': 'text', 'fields': { 'keyword': { 'type': 'keyword', 'ignore_above': 256 } } }

    # Construct the geoip mapping (Used with the geoip pipeline to enrich the data)
    geoip_mapping = {
        'city_name'        : keyword_mapping,
        'continent_name'   : keyword_mapping,
        'country_iso_code' : keyword_mapping,
        'country_name'     : keyword_mapping,
        'location'         : { 'type': 'geo_point' },
        'region_iso_code'  : keyword_mapping,
        'region_name'      : keyword_mapping,
    }

    # Construct the index mapping
    mapping = {
        'mappings': {
            'properties': {
                'ip'      : { 'type': 'ip' },
                'port'    : { 'type': 'integer' },
                'proto'   : { 'type': 'keyword' },
                'service' : { 'type': 'keyword' },
                'banner'  : keyword_mapping,
                'seen'    : { 'type': 'date' }
                #'geoip'  : { 'properties': geoip_mapping }
            }
        }
    }

    return mapping
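

# --- Illustrative sketch (not part of the original ERIS script) ---------------
# A minimal example of how the mapping returned by construct_map() could be
# applied, assuming the official `elasticsearch[async]` client (8.x keyword
# API) and an unauthenticated node at http://localhost:9200. In ERIS the
# driver script is expected to handle index creation; this only shows the
# call shape.
async def example_create_index(host: str = 'http://localhost:9200'):
    from elasticsearch import AsyncElasticsearch # pip install 'elasticsearch[async]'

    client = AsyncElasticsearch(host)
    try:
        # Create the index with the mapping only if it does not already exist
        if not await client.indices.exists(index=default_index):
            await client.indices.create(index=default_index, mappings=construct_map()['mappings'])
    finally:
        await client.close()
# -------------------------------------------------------------------------------
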
async def process_data(input_path: str):
    '''
    Read and process the input file

    :param input_path: Path to the input file
    '''

    async with aiofiles.open(input_path) as input_file:
        # Read the input file line by line
        async for line in input_file:
            line = line.strip()

            # Sentinel value to indicate the end of a process (for closing out a FIFO stream)
            if line == '~eof':
                break

            # Skip empty lines and lines that do not start with a JSON object
            if not line or not line.startswith('{'):
                continue

            # Parse the JSON record
            try:
                record = json.loads(line)
            except json.decoder.JSONDecodeError:
                logging.error(f'Failed to parse JSON record! ({line})')
                continue

            # Process the record
            struct = {
                'ip'    : record['ip'],
                'port'  : record['port'],
                'proto' : record['proto'],
                'seen'  : time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(int(record['timestamp'])))
            }

            # Add the service information if available (this field is optional)
            if record['rec_type'] == 'banner':
                data = record['data']
                if 'service_name' in data:
                    if (service_name := data['service_name']) not in ('unknown', ''):
                        struct['service'] = service_name
                if 'banner' in data:
                    banner = ' '.join(data['banner'].split()) # Remove extra whitespace
                    if banner:
                        struct['banner'] = banner

            # Yield the record as an Elasticsearch bulk action
            yield {'_index': default_index, '_source': struct}


async def test(input_path: str):
    '''
    Test the ingestion process

    :param input_path: Path to the input file
    '''

    async for document in process_data(input_path):
        print(document)



if __name__ == '__main__':
    import argparse
    import asyncio

    parser = argparse.ArgumentParser(description='Ingestor for ERIS')
    parser.add_argument('input_path', help='Path to the input file or directory')
    args = parser.parse_args()

    asyncio.run(test(args.input_path))



'''
Deploy:
    apt-get install iptables masscan libpcap-dev screen
    setcap 'CAP_NET_RAW+eip CAP_NET_ADMIN+eip' /bin/masscan
    /sbin/iptables -A INPUT -p tcp --dport 61010 -j DROP # Not persistent
    printf "0.0.0.0/8\n10.0.0.0/8\n100.64.0.0/10\n127.0.0.0/8\n169.254.0.0/16\n172.16.0.0/12\n192.0.0.0/24\n192.0.2.0/24\n192.31.196.0/24\n192.52.193.0/24\n192.88.99.0/24\n192.168.0.0/16\n192.175.48.0/24\n198.18.0.0/15\n198.51.100.0/24\n203.0.113.0/24\n224.0.0.0/3\n255.255.255.255/32" > exclude.conf
    screen -S scan
    masscan 0.0.0.0/0 -p18000 --banners --http-user-agent "USER_AGENT" --source-port 61010 --open-only --rate 30000 --excludefile exclude.conf -oD 18000.json
    masscan 0.0.0.0/0 -p21,22,23 --banners --http-user-agent "USER_AGENT" --source-port 61000-65503 --open-only --rate 30000 --excludefile exclude.conf -oD output_new.json --shard $i/$TOTAL

Output (from masscan):
    {
        "ip"        : "43.134.51.142",
        "timestamp" : "1705255468",
        "ports"     : [
            {
                "port"    : 22, # We will create a record for each port opened
                "proto"   : "tcp",
                "service" : {
                    "name"   : "ssh",
                    "banner" : "SSH-2.0-OpenSSH_8.9p1 Ubuntu-3ubuntu0.4"
                }
            }
        ]
    }

Input (document sent to Elasticsearch):
    {
        "_id"     : "43.134.51.142:22",
        "_index"  : "masscan-logs",
        "_source" : {
            "ip"      : "43.134.51.142",
            "port"    : 22,
            "proto"   : "tcp",
            "service" : "ssh",
            "banner"  : "SSH-2.0-OpenSSH_8.9p1 Ubuntu-3ubuntu0.4",
            "seen"    : "2021-10-08T02:04:28Z"
        }
    }
'''
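

# --- Illustrative sketch (not part of the original ERIS script) ---------------
# A minimal example of how the bulk actions yielded by process_data() could be
# pushed into Elasticsearch, assuming the official `elasticsearch[async]`
# client and its async_bulk() helper against an unauthenticated node at
# http://localhost:9200. The ERIS driver is expected to handle the actual bulk
# ingestion; this only shows the call shape for standalone use.
async def example_ingest(input_path: str, host: str = 'http://localhost:9200'):
    from elasticsearch import AsyncElasticsearch # pip install 'elasticsearch[async]'
    from elasticsearch.helpers import async_bulk

    client = AsyncElasticsearch(host)
    try:
        # async_bulk() accepts the async generator of bulk actions directly
        success, errors = await async_bulk(client, process_data(input_path), raise_on_error=False)
        logging.info(f'Indexed {success} documents ({len(errors)} errors)')
    finally:
        await client.close()

# Example usage: asyncio.run(example_ingest('18000.json'))
# -------------------------------------------------------------------------------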