proxytools - collection of scripts for harvesting & testing proxies
git clone git://git.acid.vegas/proxytools.git
shellsocked (1069B)
#!/usr/bin/env bash
# shellsocked - developed by acidvegas (https://git.acid.vegas/proxytools)

# Probably the most basic proxy scraper ever made. Nothing but curl and standard Unix tools, no bullshit.
# Duplicate proxies are removed and the output is sorted and saved to a file.
# Feed it a single URL or a file with a list of URLs to scrape.

# Fetch a URL, extract every ip:port pair on the page, and append the unique hits to $PROXIES
scrape_url() {
	local url="$1"
	local proxies count
	proxies=$(curl -s -A "ShellSocked/1.0" "$url" | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+:[0-9]+' | awk '!seen[$0]++')
	count=$(printf '%s' "$proxies" | grep -c .) # grep -c . counts non-empty lines, so an empty scrape reports 0 instead of 1
	if [ -n "$proxies" ]; then
		PROXIES="${PROXIES}${proxies}"$'\n' # trailing newline keeps batches from different URLs on separate lines
	fi
	echo -e "Found \033[32m${count}\033[0m proxies on \033[33m${url}\033[0m"
}

if [ -n "$1" ]; then
	PROXIES=""
	if [ -f "$1" ]; then
		# Argument is a file: scrape every URL it lists, one per line
		while IFS= read -r url; do
			scrape_url "$url"
		done < "$1"
	else
		# Argument is a single URL
		scrape_url "$1"
	fi
else
	echo "Usage: $0 <input_file | single_url>"
	exit 1
fi

# Merge all batches, drop duplicates across sources, and save the sorted list
PROXIES=$(printf '%s' "$PROXIES" | sort -u)
printf '%s\n' "$PROXIES" > proxies.txt

total_count=$(printf '%s\n' "$PROXIES" | grep -c .)
echo "Grand Total: ${total_count} proxies"
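Usage takes a single argument: either one URL or a file containing URLs, one per line. A quick example, where the list URL and urls.txt are placeholders rather than anything shipped with the repo:

# scrape a single page (substitute a real proxy-list URL)
./shellsocked https://example.com/proxy-list.txt

# scrape every source listed one per line in urls.txt
./shellsocked urls.txt

# the deduplicated, sorted results land in proxies.txt in the current directory
wc -l proxies.txt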
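The scraper records ip:port pairs but not their protocol, so the "testing" half of the repo's description has to come from elsewhere. As a minimal sketch, assuming the scraped entries are SOCKS5 proxies (shellsocked itself makes no such guarantee), each one can be probed with curl:

# checksocked - hypothetical companion sketch, not part of the repo
# Assumes SOCKS5; swap socks5:// for http:// to probe HTTP proxies instead.
while IFS= read -r proxy; do
	if curl -s -m 5 -x "socks5://${proxy}" -o /dev/null "https://example.com"; then
		echo "alive: ${proxy}"
	else
		echo "dead:  ${proxy}"
	fi
done < proxies.txt

Using socks5h:// instead would route DNS resolution through the proxy as well, which better reflects how a SOCKS proxy is typically used.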