proxytools - collection of scripts for harvesting & testing proxies
git clone git://git.acid.vegas/proxytools.git
commit 9ab3a3f541262dd9811f6bc1ca39e1c2604e94d5
parent 2372f8f10130b449f4925ada9df2f13015907314
Author: acidvegas <acid.vegas@acid.vegas>
Date:   Sat, 10 Jun 2023 15:37:18 -0400

Added note about concurrent.futures possibility for larger lists

Diffstat:
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/sockhub.py b/sockhub.py
@@ -7,6 +7,7 @@ Scrap IP:PORT proxies from a URL list
 '''
+import concurrent.futures
 import os
 import re
 import time
@@ -63,7 +64,7 @@ total = 0
 proxies = list()
 proxy_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'proxies.txt')
 print('scanning \033[35m{0:,}\033[0m urls from list...'.format(len(urls)))
-for url in urls:
+for url in urls: # TODO: Maybe add concurrent.futures support for using larger lists
 	try:
 		source = get_source(url)
 	except:
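
The commit only adds the TODO comment; below is a minimal sketch of what that concurrent.futures idea could look like, spreading the URL loop across a thread pool. The get_source() helper, the urls list, and the proxy regex here are simplified stand-ins for the definitions in sockhub.py, not the script's actual code.

import concurrent.futures
import re
import urllib.request

# Placeholder source list; sockhub.py builds its own list of URLs to scan.
urls = ['https://example.com/proxies.txt']

def get_source(url):
	'''Fetch the raw text of a URL (simplified stand-in for the real helper).'''
	return urllib.request.urlopen(url, timeout=10).read().decode(errors='ignore')

def scrape(url):
	'''Pull IP:PORT pairs out of a single URL, swallowing fetch errors like the original loop.'''
	try:
		source = get_source(url)
	except Exception:
		return []
	return re.findall(r'[0-9]{1,3}(?:\.[0-9]{1,3}){3}:[0-9]{1,5}', source)

proxies = set()
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
	# map() runs scrape() on each URL concurrently and yields results in order
	for found in pool.map(scrape, urls):
		proxies.update(found)

print('found \033[35m{0:,}\033[0m unique proxies'.format(len(proxies)))

Since the work is network-bound, a ThreadPoolExecutor is usually enough here; the max_workers value is an arbitrary choice and would need tuning for very large URL lists.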