avoidr - masscan with exclusive exclusions |
git clone git://git.acid.vegas/avoidr.git |
Log | Files | Refs | Archive | README | LICENSE |
avoidr.py (6534B)
#!/usr/bin/env python
# avoidr (masscan with exclusive exclusions) - developed by acidvegas in python (https://git.acid.vegas/avoidr)

import hashlib
import ipaddress
import json
import logging
import os
import sys
import time
import urllib.request
from zipfile import ZipFile

# Globals
grand_total = {'4': 0, '6': 0}  # running totals of IP addresses excluded, keyed by IP version
results = dict()                # asn -> {'name': str, 'ranges': {'4': [cidrs], '6': [cidrs]}}

# Setup logger
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')


def calculate_hash(path):
	'''
	Calculate the SHA1 hash of a file's raw contents.

	:param path: The path to the file to hash.
	'''
	hash_sha1 = hashlib.sha1()
	with open(path, 'rb') as f:
		for chunk in iter(lambda: f.read(4096), b''):
			hash_sha1.update(chunk)
	return hash_sha1.hexdigest()


def calculate_git_blob_hash(path):
	'''
	Calculate the git blob SHA1 of a file, which is what the GitHub contents
	API returns in its 'sha' field.

	Git hashes the header 'blob <size>\\0' followed by the content, so a plain
	SHA1 of the file (calculate_hash) will never match the API's value.

	:param path: The path to the file to hash.
	'''
	hash_sha1 = hashlib.sha1()
	hash_sha1.update(b'blob %d\0' % os.path.getsize(path))
	with open(path, 'rb') as f:
		for chunk in iter(lambda: f.read(4096), b''):
			hash_sha1.update(chunk)
	return hash_sha1.hexdigest()


def download_file(url: str, dest_filename: str, chunk_size: int = 1024*1024):
	'''
	Download a file from a given URL in chunks and save to a destination filename.

	:param url: The URL of the file to download
	:param dest_filename: The destination filename to save the downloaded file
	:param chunk_size: Size of chunks to download. Default is set to 1MB.
	'''
	with urllib.request.urlopen(url) as response:
		length = response.getheader('Content-Length')
		total_size = int(length.strip()) if length else 0  # header may be missing on chunked responses
		downloaded_size = 0
		with open(dest_filename, 'wb') as out_file:
			while True:
				start_time = time.time()
				chunk = response.read(chunk_size)
				if not chunk:
					break
				downloaded_size += len(chunk)
				out_file.write(chunk)
				elapsed = time.time() - start_time
				speed = len(chunk) / elapsed if elapsed > 0 else 0  # avoid division by zero on sub-resolution timings
				if total_size:
					progress = (downloaded_size / total_size) * 100
					sys.stdout.write(f'\rDownloaded {downloaded_size} of {total_size} bytes ({progress:.2f}%) at {speed/1024:.2f} KB/s\r')
				else:
					sys.stdout.write(f'\rDownloaded {downloaded_size} bytes at {speed/1024:.2f} KB/s\r')
				sys.stdout.flush()
	print()


def get_url(url) -> str:
	'''
	Get the contents of a URL.

	:param url: The URL to get the contents of.
	'''
	data = {'Accept': 'application/vnd.github.v3+json', 'User-Agent': 'Avoidr/1.0 (https://git.acid.vegas/avoidr)'}
	req = urllib.request.Request(url, headers=data)
	return urllib.request.urlopen(req, timeout=10).read().decode()


def update_database():
	'''Update the ASN database if the local copy differs from the one on GitHub.'''

	logging.info('Checking for database updates...')

	DB = 'databases/fullASN.json.zip'
	update = False
	os.makedirs('databases', exist_ok=True)

	if not os.path.exists(DB):
		update = True
	else:
		# The API's 'sha' is a git blob hash, so compare against the blob hash
		# of the local file (a plain SHA1 would never match and would force a
		# re-download on every run).
		old_hash = calculate_git_blob_hash(DB)
		new_hash = json.loads(get_url('https://api.github.com/repos/ipapi-is/ipapi/contents/'+DB))['sha']
		if old_hash != new_hash:
			update = True

	if update:
		logging.info('Updating database...')
		for OLD_DB in (DB, DB[:-4]):  # remove both the stale zip and the previously extracted json
			if os.path.exists(OLD_DB):
				os.remove(OLD_DB)
		download_file('https://github.com/ipapi-is/ipapi/raw/main/'+DB, DB)
		with ZipFile(DB) as zObject:
			# DB[10:-4] strips the 'databases/' prefix and '.zip' suffix to get
			# the archive member name ('fullASN.json').
			zObject.extract(DB[10:-4], 'databases')
	else:
		logging.info('Database is up-to-date!')


def process_asn(data: dict):
	'''
	Process an ASN and add it to the results.

	Reads the global `args` (set in the __main__ block) to decide which IP
	versions to collect, and updates the `results` and `grand_total` globals.

	:param data: The ASN data to process. Assumes a 'descr' key is always
	             present (the ipapi database appears to guarantee this —
	             TODO confirm).
	'''

	title = data['descr'] if 'org' not in data else data['descr'] + ' / ' + data['org']
	results[data['asn']] = {'name': title, 'ranges': dict()}

	if 'prefixes' in data and not args.ipv6:
		results[data['asn']]['ranges']['4'] = data['prefixes']
		total = total_ips(data['prefixes'])
		grand_total['4'] += total
		logging.info('Found \033[93mAS{0}\033[0m \033[1;30m({1})\033[0m containing \033[32m{2:,}\033[0m IPv4 ranges with \033[36m{3:,}\033[0m total IP addresses'.format(data['asn'], title, len(data['prefixes']), total))

	if 'prefixesIPv6' in data and not args.ipv4:
		results[data['asn']]['ranges']['6'] = data['prefixesIPv6']
		total = total_ips(data['prefixesIPv6'])
		grand_total['6'] += total
		logging.info('Found \033[93mAS{0}\033[0m \033[1;30m({1})\033[0m containing \033[32m{2:,}\033[0m IPv6 ranges with \033[36m{3:,}\033[0m total IP addresses'.format(data['asn'], title, len(data['prefixesIPv6']), total))


def total_ips(ranges: list) -> int:
	'''
	Calculate the total number of IP addresses in a list of CIDR ranges.

	:param ranges: The list of CIDR ranges to calculate the total number of IP addresses for.
	'''
	return sum(ipaddress.ip_network(cidr).num_addresses for cidr in ranges)


# Main
if __name__ == '__main__':
	import argparse

	parser = argparse.ArgumentParser(description='masscan with exclusive exclusions')
	parser.add_argument('-4', '--ipv4', help='process IPv4 addresses only', action='store_true')
	parser.add_argument('-6', '--ipv6', help='process IPv6 addresses only', action='store_true')
	parser.add_argument('-x', '--exclude', help='create exclusions for masscan instead of a json output', action='store_true')
	parser.add_argument('-s', '--search', help='comma separated strings to search (no output file)', type=str)
	parser.add_argument('-u', '--update', help='update the ASN database', action='store_true')

	args = parser.parse_args()

	if args.update or not os.path.exists('databases/fullASN.json'):
		update_database()

	with open('databases/fullASN.json') as fp:
		asn_data = json.load(fp)

	if args.search:
		queries = args.search.split(',')
	else:
		with open('custom.txt') as fp:
			queries = [line.rstrip() for line in fp]

	# Lowercase the queries once instead of per ASN/field inside the loop.
	queries_lower = [query.lower() for query in queries]

	# logging.info (not .debug) so the message is visible at the configured INFO level.
	logging.info(f'Searching {len(queries):,} queries against {len(asn_data):,} ASNs...')

	for asn in asn_data:
		for field in ('descr', 'org'):
			if field not in asn_data[asn]:
				continue
			haystack = asn_data[asn][field].lower()
			if any(query in haystack for query in queries_lower):
				if asn_data[asn]['asn'] not in results:
					process_asn(asn_data[asn])
				break  # matched on this ASN; no need to check the other field

	if not args.search:
		os.makedirs('output', exist_ok=True)

		if args.exclude:
			with open('output/exclude.conf', 'w') as fp:
				for item in results:
					fp.write(f'# AS{item} - {results[item]["name"]}\n')
					for version in results[item]['ranges']:
						for _range in results[item]['ranges'][version]:
							fp.write(_range+'\n')
					fp.write('\n')
		else:
			with open('output/out.json', 'w') as fp:
				json.dump(results, fp)
	else:
		logging.info('Add these to your custom.txt file to create output files...')

	total_v4 = ipaddress.ip_network('0.0.0.0/0').num_addresses
	total_v6 = ipaddress.ip_network('::/0').num_addresses
	print('Total IPv4 Addresses   : {0:,}'.format(total_v4))
	print('Total IPv4 After Clean : {0:,}'.format(total_v4-grand_total['4']))
	print('Total IPv6 Addresses   : {0:,}'.format(total_v6))
	print('Total IPv6 After Clean : {0:,}'.format(total_v6-grand_total['6']))