Create a Python file in the home directory of the VPS (for example: proxy_test.py). After creating it, copy and paste the code below, and modify the proxy list URL or file paths as needed.
import requests
import concurrent.futures
import time
import os
import json
from urllib3.exceptions import InsecureRequestWarning
# Suppress only the InsecureRequestWarning from urllib3. We deliberately pass
# verify=False on every request below, so these warnings would otherwise
# flood the console.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# --- Configuration Parameters ---
# Public HTTP proxy list (plain text, one "ip:port" entry per line).
PROXY_URL = 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt'
# Target used for checking proxy anonymity (must echo back request data)
ANONYMITY_TARGET = 'http://httpbin.org/get'
# Stricter, real-world target for final connectivity check (tests SSL/TLS)
FINAL_CONNECTIVITY_TARGET = 'https://www.google.com'
# Per-request timeout for a single proxy test (seconds).
TIMEOUT = 5
# Maximum acceptable total latency in seconds, measured across BOTH test
# requests; any proxy slower than this is rejected.
MAX_LATENCY = 4.5
# Maximum number of concurrent checker threads.
MAX_WORKERS = 180
# ------------------
def fetch_proxies(url):
    """
    Download the proxy list from *url* and return it as a list of stripped,
    non-empty "ip:port" lines.

    Browser-like headers are sent to avoid trivial bot blocking, and SSL
    verification is disabled because public list hosts often have
    certificate problems. Returns an empty list on any request failure.
    """
    print(f"Trying to fetch the proxy list from {url}...")
    browser_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Referer': 'https://www.google.com/',
        'Accept-Encoding': 'gzip, deflate, br',
        'Connection': 'keep-alive',
    }
    try:
        reply = requests.get(url, headers=browser_headers, timeout=10, verify=False)
        reply.raise_for_status()
    except requests.exceptions.RequestException as err:
        print(f"❌ Error: Failed to fetch the proxy list, please check the URL or network connection. Error message: {err}")
        return []
    entries = [line.strip() for line in reply.text.splitlines() if line.strip()]
    print(f"Successfully fetched {len(entries)} proxies.")
    return entries
def get_real_ip():
    """
    Return this machine's public IP address as a string, or None when both
    lookup services fail.

    httpbin.org is queried first; api.ipify.org serves as the fallback.
    """
    # --- Attempt Method 1: httpbin.org ---
    try:
        print("Trying to get the real IP using httpbin.org...")
        primary = requests.get(ANONYMITY_TARGET, timeout=5)
        primary.raise_for_status()
        origin = primary.json().get('origin')
        if origin:
            # httpbin sometimes reports several comma-separated IPs; keep
            # only the first one.
            return origin.split(',', 1)[0].strip()
    except Exception as err:
        print(f"Failed to get from httpbin.org: {type(err).__name__}. Trying fallback...")
    # --- Attempt Method 2: api.ipify.org (fallback) ---
    try:
        fallback = requests.get('https://api.ipify.org?format=json', timeout=5)
        fallback.raise_for_status()
        return fallback.json().get('ip')
    except Exception as err:
        print(f"Failed to get from api.ipify.org: {type(err).__name__}.")
    return None
def check_proxy(proxy, real_ip):
    """
    Tests a single proxy for anonymity, speed (latency), and real-world connectivity.

    Args:
        proxy: Candidate proxy as an "ip:port" string.
        real_ip: The client's own public IP, used to detect transparent proxies.

    Returns:
        A (proxy, is_working, reason, latency) tuple. latency (seconds, total
        across both test requests) is only non-None on success.
    """
    ip_port = proxy.split(':')
    if len(ip_port) != 2:
        return proxy, False, "Format error", None
    proxy_ip = ip_port[0]
    proxies = {
        'http': f'http://{proxy}',
        'https': f'http://{proxy}',
    }
    # --- 1. Anonymity Check (using ANONYMITY_TARGET) ---
    try:
        start_time = time.time()
        # Use the global TIMEOUT for the initial connection.
        response = requests.get(ANONYMITY_TARGET, proxies=proxies, timeout=TIMEOUT, verify=False)
        # Basic connectivity check
        if response.status_code != 200:
            return proxy, False, "Connection/Status code abnormal", None
        # Anonymity check. httpbin may report "ip1, ip2", so compare only the
        # FIRST reported address, and compare it exactly to the proxy's IP.
        # BUGFIX: the previous prefix test via startswith() wrongly accepted
        # e.g. origin "1.2.3.45" for proxy IP "1.2.3.4".
        reported_ip = response.json().get('origin')
        origin_ip = reported_ip.split(',')[0].strip() if reported_ip else ''
        if not (origin_ip and origin_ip != real_ip and origin_ip == proxy_ip):
            return proxy, False, "Transparent/Identity information leakage", None
    except requests.exceptions.Timeout:
        return proxy, False, f"Timeout (>{TIMEOUT}s)", None
    except Exception as e:
        return proxy, False, f"Anonymity check failed: {type(e).__name__}", None
    # --- 2. Final Real-World Connectivity Check (using FINAL_CONNECTIVITY_TARGET) ---
    try:
        # Test against Google to prove real HTTPS traffic gets through.
        requests.get(FINAL_CONNECTIVITY_TARGET, proxies=proxies, timeout=TIMEOUT, verify=False).raise_for_status()
        # Total latency is measured from the start of the FIRST request, so it
        # covers both the anonymity and the connectivity round-trips.
        total_latency = time.time() - start_time
        # SPEED CHECK: only accept proxies under the maximum latency threshold.
        if total_latency > MAX_LATENCY:
            return proxy, False, f"Speed too slow (total latency: {total_latency:.2f}s)", None
        # Success: anonymous, connectable, and fast enough.
        return proxy, True, f"Highly anonymous/Anonymous/Fast connectivity (total latency: {total_latency:.2f}s)", total_latency
    except Exception as e:
        # Passed anonymity but failed the final Google test: blocked/incompatible.
        return proxy, False, f"Blocked by the target website: {type(e).__name__}", None
def batch_check(proxies, real_ip):
    """Test all proxies concurrently; return the working ones sorted by latency."""
    total = len(proxies)
    alive = []  # (proxy, latency) pairs for every proxy that passed all checks
    with concurrent.futures.ThreadPoolExecutor(max_workers=MAX_WORKERS) as pool:
        # Fan out one check_proxy task per candidate.
        pending = {pool.submit(check_proxy, candidate, real_ip): candidate
                   for candidate in proxies}
        # Consume results as they finish, showing live progress.
        for done, future in enumerate(concurrent.futures.as_completed(pending), start=1):
            candidate = pending[future]
            try:
                # Each task yields (proxy, is_working, reason, latency).
                candidate, ok, reason, latency = future.result()
            except Exception as exc:
                ok = False
                latency = None
                reason = f"Thread error: {exc}"
            print(f"\r[{done}/{total}] Checking... Current available: {len(alive)}", end="", flush=True)
            if ok:
                alive.append((candidate, latency))
    # Fastest proxies first.
    alive.sort(key=lambda pair: pair[1])
    # Clear progress output and newline.
    print("\n")
    return [proxy for proxy, _ in alive]
if __name__ == '__main__':
    run_started = time.time()
    # 0. Get the client's real IP for anonymity comparison.
    print("Fetching your real IP address for anonymity comparison...")
    real_ip = get_real_ip()
    if real_ip:
        print(f"✅ Your real IP address is: {real_ip}")
    else:
        print("❌ Warning: Unable to get real IP, skipping anonymity check, proxy quality may decrease.")
        real_ip = '0.0.0.0'  # Fallback sentinel so IP comparisons still run.
    # 1. Fetch proxy list.
    proxy_list = fetch_proxies(PROXY_URL)
    if not proxy_list:
        print("Program terminated, failed to fetch the proxy list.")
    else:
        print(f"Starting to test {len(proxy_list)} proxies (require anonymity, speed < {MAX_LATENCY:.1f}s, target: {FINAL_CONNECTIVITY_TARGET}), concurrency: {MAX_WORKERS}...")
        # 2. Batch check.
        successful_proxies = batch_check(proxy_list, real_ip)
        run_finished = time.time()
        # 3. Summary and saving results.
        print("\n--- Result Summary ---")
        print(f"Total time: {run_finished - run_started:.2f} seconds")
        print(f"Total proxies: {len(proxy_list)}")
        print(f"Available (Highly anonymous/Anonymous/Fast) proxies: {len(successful_proxies)}")
        if proxy_list:
            print(f"Success rate: {len(successful_proxies)/len(proxy_list)*100:.2f}%")
        output_file = 'working_proxies.txt'
        with open(output_file, 'w') as f:
            f.write('\n'.join(successful_proxies))
        print(f"\n✅ Available proxies have been saved to file: {output_file} (sorted by latency, fastest first)")
Below is the SOCKS5 batch-testing script. Save it as a separate file (for example: socks5_test.py). Note that SOCKS5 support requires an extra dependency: pip install requests[socks].
import requests
import concurrent.futures
import time
import os
import json
from urllib3.exceptions import InsecureRequestWarning
# Note: To use SOCKS5 proxies, you must first run: pip install requests[socks]
# Suppress only the InsecureRequestWarning from urllib3. We deliberately pass
# verify=False on every request below, so these warnings would otherwise
# flood the console.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# --- Configuration Parameters ---
# Public SOCKS5 proxy list (plain text, one "ip:port" entry per line).
PROXY_URL = 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks5.txt'
# Target used for checking proxy anonymity (must return request data)
ANONYMITY_TARGET = 'http://httpbin.org/get'
# Stricter, real-world target for final connectivity check (tests SSL/TLS)
FINAL_CONNECTIVITY_TARGET = 'https://www.google.com'
# Per-request timeout for a single proxy test (seconds).
TIMEOUT = 5
# Maximum acceptable total latency in seconds, measured across BOTH test
# requests; any proxy slower than this is rejected.
MAX_LATENCY = 4.0
# Maximum number of concurrent checker threads.
MAX_WORKERS = 180
# ------------------
def fetch_proxies(url):
    """
    Download the proxy list from *url*, returning the stripped, non-empty
    lines as a list. An empty list is returned on any request failure.
    """
    print(f"Trying to fetch the proxy list from {url}...")
    # Browser-like headers to avoid trivial bot blocking.
    disguise = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Referer': 'https://www.google.com/',
        'Accept-Encoding': 'gzip, deflate, br',
        'Connection': 'keep-alive',
    }
    try:
        # verify=False: public list hosts frequently have certificate issues.
        reply = requests.get(url, headers=disguise, timeout=10, verify=False)
        reply.raise_for_status()
        found = [line.strip() for line in reply.text.splitlines() if line.strip()]
        print(f"Successfully fetched {len(found)} proxies.")
        return found
    except requests.exceptions.RequestException as err:
        print(f"❌ Error: Failed to fetch the proxy list, please check the URL or network connection. Error message: {err}")
        return []
def get_real_ip():
    """
    Fetch this client's public IP address using a dual fallback mechanism.

    Returns the IP as a string, or None when both services fail.
    """
    # --- Attempt Method 1: httpbin.org ---
    try:
        print("Trying to get the real IP using httpbin.org...")
        reply = requests.get(ANONYMITY_TARGET, timeout=5)
        reply.raise_for_status()
        origin = reply.json().get('origin')
        if origin:
            # httpbin may list several comma-separated IPs; use the first.
            first, _, _ = origin.partition(',')
            return first.strip()
    except Exception as err:
        print(f"Failed to get from httpbin.org: {type(err).__name__}. Trying fallback...")
    # --- Attempt Method 2: api.ipify.org (fallback) ---
    try:
        reply = requests.get('https://api.ipify.org?format=json', timeout=5)
        reply.raise_for_status()
        return reply.json().get('ip')
    except Exception as err:
        print(f"Failed to get from api.ipify.org: {type(err).__name__}.")
    return None
def check_proxy(proxy, real_ip):
    """
    Tests a single SOCKS5 proxy for anonymity, speed, and actual connectivity.

    Args:
        proxy: Candidate proxy as an "ip:port" string.
        real_ip: The client's own public IP, used to detect transparent proxies.

    Returns:
        A (proxy, is_working, reason, latency) tuple. latency (seconds, total
        across both test requests) is only non-None on success.
    """
    ip_port = proxy.split(':')
    if len(ip_port) != 2:
        return proxy, False, "Format error", None
    proxy_ip = ip_port[0]
    # Key point: route both schemes through the socks5:// protocol.
    # (Requires the PySocks extra: pip install requests[socks].)
    proxies = {
        'http': f'socks5://{proxy}',
        'https': f'socks5://{proxy}',
    }
    # --- 1. Anonymity Check (using ANONYMITY_TARGET) ---
    try:
        start_time = time.time()
        # Use the global TIMEOUT for the initial connection.
        response = requests.get(ANONYMITY_TARGET, proxies=proxies, timeout=TIMEOUT, verify=False)
        # Basic connectivity check
        if response.status_code != 200:
            return proxy, False, "Connection/Status code abnormal", None
        # Anonymity check. httpbin may report "ip1, ip2", so compare only the
        # FIRST reported address, and compare it exactly to the proxy's IP.
        # BUGFIX: the previous prefix test via startswith() wrongly accepted
        # e.g. origin "1.2.3.45" for proxy IP "1.2.3.4".
        reported_ip = response.json().get('origin')
        origin_ip = reported_ip.split(',')[0].strip() if reported_ip else ''
        if not (origin_ip and origin_ip != real_ip and origin_ip == proxy_ip):
            return proxy, False, "Transparent/Identity information leakage", None
    except requests.exceptions.Timeout:
        return proxy, False, f"Timeout (>{TIMEOUT}s)", None
    except Exception as e:
        return proxy, False, f"Anonymity check failed: {type(e).__name__}", None
    # --- 2. Final Real-World Connectivity Check (using FINAL_CONNECTIVITY_TARGET) ---
    try:
        # Test against Google to prove real HTTPS traffic gets through.
        requests.get(FINAL_CONNECTIVITY_TARGET, proxies=proxies, timeout=TIMEOUT, verify=False).raise_for_status()
        # Total latency is measured from the start of the FIRST request, so it
        # covers both the anonymity and the connectivity round-trips.
        total_latency = time.time() - start_time
        # Speed check: only accept proxies under the maximum latency threshold.
        if total_latency > MAX_LATENCY:
            return proxy, False, f"Speed too slow (total latency: {total_latency:.2f}s)", None
        # Success: anonymous, connectable, and fast enough.
        return proxy, True, f"Highly anonymous/Anonymous/Fast connectivity (total latency: {total_latency:.2f}s)", total_latency
    except Exception as e:
        # Passed anonymity but failed the final Google test: blocked/incompatible.
        return proxy, False, f"Blocked by the target website: {type(e).__name__}", None
def batch_check(proxies, real_ip):
    """Test all proxies concurrently; return the working ones, fastest first."""
    num_total = len(proxies)
    survivors = []  # (proxy, latency) pairs for every proxy that passed
    with concurrent.futures.ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        # One check_proxy task per candidate, keyed back to its proxy string.
        task_map = {executor.submit(check_proxy, entry, real_ip): entry
                    for entry in proxies}
        completed = 0
        for task in concurrent.futures.as_completed(task_map):
            completed += 1
            entry = task_map[task]
            try:
                # Each task yields (proxy, is_working, reason, latency).
                entry, passed, reason, delay = task.result()
            except Exception as exc:
                passed = False
                delay = None
                reason = f"Thread error: {exc}"
            print(f"\r[{completed}/{num_total}] Checking... Current available: {len(survivors)}", end="", flush=True)
            if passed:
                survivors.append((entry, delay))
    # Sort by latency so the fastest proxies come first.
    survivors.sort(key=lambda item: item[1])
    # Clear progress output and newline.
    print("\n")
    return [entry for entry, _ in survivors]
if __name__ == '__main__':
    t_begin = time.time()
    # 0. Get the client's real IP for anonymity comparison.
    print("Fetching your real IP address for anonymity comparison...")
    real_ip = get_real_ip()
    if real_ip:
        print(f"✅ Your real IP address is: {real_ip}")
    else:
        print("❌ Warning: Unable to get real IP, skipping anonymity check, proxy quality may decrease.")
        real_ip = '0.0.0.0'  # Fallback sentinel so IP comparisons still run.
    # 1. Fetch proxy list.
    proxy_list = fetch_proxies(PROXY_URL)
    if not proxy_list:
        print("Program terminated, failed to fetch the proxy list.")
    else:
        print(f"Starting to test {len(proxy_list)} proxies (require anonymity, speed < {MAX_LATENCY:.1f}s, target: {FINAL_CONNECTIVITY_TARGET}), concurrency: {MAX_WORKERS}...")
        # 2. Batch check.
        successful_proxies = batch_check(proxy_list, real_ip)
        t_end = time.time()
        # 3. Summary and saving results.
        print("\n--- Result Summary ---")
        print(f"Total time: {t_end - t_begin:.2f} seconds")
        print(f"Total proxies: {len(proxy_list)}")
        print(f"Available (Highly anonymous/Anonymous/Fast) proxies: {len(successful_proxies)}")
        if proxy_list:
            print(f"Success rate: {len(successful_proxies)/len(proxy_list)*100:.2f}%")
        output_file = 'working_socks5_proxies.txt'
        with open(output_file, 'w') as f:
            f.write('\n'.join(successful_proxies))
        print(f"\n✅ Available proxies have been saved to file: {output_file} (sorted by latency, fastest first)")
Before executing, please check the version and install the required libraries.
Check Python version#
python3 --version
Check pip (Python package manager) version#
pip3 --version
If the command does not exist, you need to execute:
sudo apt update
sudo apt install python3 python3-pip -y
Then install the virtual environment toolkit
sudo apt update
sudo apt install python3-venv -y
Enter the home directory and create a virtual environment
cd ~
python3 -m venv proxy_venv
Activate the environment (after activation, the shell prompt will be prefixed with (proxy_venv))
source proxy_venv/bin/activate
Install the required libraries in the environment. The [socks] extra installs PySocks, which the SOCKS5 script needs (plain requests is enough for the HTTP script only):
pip install "requests[socks]"
After installation, execute the script within the environment
python proxy_test.py