Getting Started with Python Hacking Tools (Continued)
Preface
This tutorial is aimed at Python security beginners and walks through building several practical hacking tools in Python. Environment requirements:
- Windows
- Python 3.7
- Required modules: requests, bs4, optparse (optparse ships with the standard library)
Library Basics
Requests
Compared with urllib and similar modules, requests is much simpler to use. Official Chinese quickstart:
http://docs.python-requests.org/zh_CN/latest/user/quickstart.html
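As a quick reference, here is a minimal sketch of the requests calls used throughout the rest of the tutorial; the URL is only a placeholder target.

import requests

# Fetch a page with a custom User-Agent; example.com is a placeholder
header = {'User-Agent': 'Mozilla/5.0'}
r = requests.get('http://example.com', headers=header)
print(r.status_code)    # HTTP status code
print(r.content[:100])  # first bytes of the raw response body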
BeautifulSoup4 (bs4)
bs4 is an HTML/XML parsing library commonly used in crawlers; here we only use its HTML-parsing features. Chinese documentation:
http://beautifulsoup.readthedocs.io/zh_CN/v4.4.0/
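A minimal sketch of how BeautifulSoup is used below to pull text out of HTML; it parses a hard-coded string rather than a live page.

from bs4 import BeautifulSoup

html = '<table><tr><td>1.2.3.4</td><td>8080</td></tr></table>'
soup = BeautifulSoup(html, 'html.parser')
for td in soup.find_all('td'):   # iterate over every <td> cell
    print(td.get_text())         # -> 1.2.3.4, then 8080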
Hands-On Tool Development
1. Proxy IP Scraper (xicidaili.com)
import requests
import re
from bs4 import BeautifulSoup

def daili():
    print('[+] Scraping proxy IPs, 99 pages by default')
    for b in range(1, 100):   # pages 1-99
        url = "http://www.xicidaili.com/nt/{}".format(b)
        header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:58.0) Gecko/20100101 Firefox/48.0'}
        r = requests.get(url, headers=header)
        gsx = BeautifulSoup(r.content, 'html.parser')
        # Walk every <td> cell and pull out anything that looks like an IPv4 address
        for line in gsx.find_all('td'):
            sf = line.get_text()
            dailix = re.findall(r'(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)\.(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)\.(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)\.(25[0-5]|2[0-4]\d|[0-1]\d{2}|[1-9]?\d)', str(sf))
            for g in dailix:
                po = ".".join(g)   # findall returns a 4-tuple of octets; rejoin them
                print(po)
                with open('采集到的IP.txt', 'a') as l:
                    l.write(po + '\n')

if __name__ == '__main__':
    daili()
Features:
- Scrapes ordinary proxy IPs from the xicidaili.com /nt/ listing
- Pages through the listing automatically (99 pages by default)
- Saves results to 采集到的IP.txt
- Uses a regular expression to match the IPv4 format precisely
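The scraper only collects addresses; it does not check whether they actually work. Below is a minimal sketch (not part of the original tool) of how a collected proxy could be tested with requests. Note that the scraper above saves only the IP, so the port column would also need to be captured before running a check like this; the test URL and timeout are arbitrary choices.

import requests

def check_proxy(ip_port):
    # ip_port is expected as 'IP:PORT', e.g. '1.2.3.4:8080' (hypothetical value)
    proxies = {'http': 'http://{}'.format(ip_port)}
    try:
        r = requests.get('http://httpbin.org/ip', proxies=proxies, timeout=5)
        return r.status_code == 200
    except requests.RequestException:
        return False

print(check_proxy('1.2.3.4:8080'))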
2. Information Gathering Tool (chinaz.com)
import optparse
import requests
import re
import sys
from bs4 import BeautifulSoup

def main():
    usage = "[-z Subdomain mining] [-p Side of the station inquiries] [-x http status query]"
    parser = optparse.OptionParser(usage)
    parser.add_option('-z', dest="Subdomain", help="Subdomain mining")
    parser.add_option('-p', dest='Side', help='Side of the station inquiries')
    parser.add_option('-x', dest='http', help='http status query')
    (options, args) = parser.parse_args()
    if options.Subdomain:
        subdomain = options.Subdomain
        Subdomain(subdomain)
    elif options.Side:
        side = options.Side
        Side(side)   # Side() and Http() follow the same pattern; only Subdomain() is shown here
    elif options.http:
        http = options.http
        Http(http)
    else:
        parser.print_help()
        sys.exit()

def Subdomain(subdomain):
    print('-----------Subdomains quickly tap-----------')
    url = "http://m.tool.chinaz.com/subdomain/?domain={}".format(subdomain)
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
    r = requests.get(url, headers=header).content
    # Match <td> cells that contain a domain name in the returned HTML
    g = re.finditer(r'<td>\D[a-zA-Z0-9][-a-zA-Z0-9]{0,62}\D(\.[a-zA-Z0-9]\D[-a-zA-Z0-9]{0,62})+\.?</td>', str(r))
    for x in g:
        lik = x.group()   # the matched <td>...</td> fragment
        opg = BeautifulSoup(lik, 'html.parser')
        for link in opg.find_all('td'):
            lops = link.get_text()
            print(lops)

if __name__ == '__main__':
    main()
Features:
- Three query modes:
  - -z subdomain mining
  - -p same-IP ("side station") lookup
  - -x HTTP status query (see the sketch below)
- Uses regular expressions to extract the target information
- Simple, friendly command-line interface
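The Http() status-check branch is not shown in the listing above. The following is a minimal, hypothetical sketch of what it might look like, assuming it simply reports the response code of the target URL; this is a guess for illustration, not the article's original implementation.

import requests

def Http(http):
    # 'http' is the target URL passed via -x, e.g. http://example.com
    header = {'User-Agent': 'Mozilla/5.0'}
    try:
        r = requests.get(http, headers=header, timeout=10)
        print('[*]{} -> HTTP {}'.format(http, r.status_code))
    except requests.RequestException as e:
        print('[-]request failed:', e)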
3. API Query Tool
import requests
import optparse
import json

def main():
    usage = 'usage: [-i IP query] [-m National wifi lat] [-l National wifi lon] [-x Daily News] [-t Info query]'
    parser = optparse.OptionParser(usage)
    parser.add_option('-i', dest='ip', help='ip to query')
    parser.add_option('-m', dest='wifi', help='National wifi lat')
    parser.add_option('-l', dest='wifilon', help='National wifi lon')
    parser.add_option('-x', action='store_true', dest='Daily', help='Daily News')
    parser.add_option('-t', dest='info', help='info to query')
    (options, args) = parser.parse_args()
    if options.ip:
        ipquery = options.ip
        Ipquery(ipquery)
    elif options.wifi and options.wifilon:
        wifi = options.wifi
        wifilon = options.wifilon
        Wifi(wifi, wifilon)   # Wifi(), Daily() and Info() call other API endpoints; only Ipquery() is shown here
    elif options.Daily:
        Daily()
    elif options.info:
        info = options.info
        Info(info)
    else:
        parser.print_help()
        exit()

def Ipquery(ipquery):
    # The key below comes from the original article; apply for your own at avatardata.cn
    url = "http://api.avatardata.cn/IpLookUp/LookUp?key=6a4c1df4ba10453da7ee1d50165bfd08&ip={}".format(ipquery)
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
    r = requests.get(url, headers=header)
    sdw = r.content.decode('utf-8')
    lks = json.loads(sdw)
    print('[*]ip:', ipquery)
    print('[*]area:', lks['result']['area'])
    print('[*]location:', lks['result']['location'])

if __name__ == '__main__':
    main()
Features:
- Supports several API queries:
  - IP geolocation lookup
  - Nationwide free WiFi lookup (requires latitude/longitude)
  - Daily news
  - Weather information lookup
- Parses the JSON returned by the API
- Requires applying for your own API key
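Ipquery() above assumes the request always succeeds and the key is valid. Below is a slightly more defensive variant of the same call; the error handling is generic and not specific to the avatardata response format, which may differ.

import requests

def ip_lookup(ip, key):
    # key: your own avatardata API key
    url = "http://api.avatardata.cn/IpLookUp/LookUp?key={}&ip={}".format(key, ip)
    try:
        r = requests.get(url, timeout=10)
        data = r.json()   # equivalent to json.loads(r.content.decode('utf-8'))
    except (requests.RequestException, ValueError) as e:
        print('[-]request or JSON decode failed:', e)
        return None
    result = data.get('result')
    if not result:        # a missing result usually means a bad key or quota error
        print('[-]no result field in response:', data)
        return None
    print('[*]area:', result.get('area'))
    print('[*]location:', result.get('location'))
    return result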
4. MD5 Decryption Tool
import requests
from bs4 import BeautifulSoup
import optparse

def main():
    usage = "[-m md5 decryption]"
    parser = optparse.OptionParser(usage)
    parser.add_option('-m', dest='md5', help='md5 decryption')
    (options, args) = parser.parse_args()
    if options.md5:
        md5 = options.md5
        Md5(md5)
    else:
        parser.print_help()
        exit()

def Md5(md5):
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
    # Hidden ASP.NET form fields captured from pmd5.com; they may change over time
    data = {
        '__VIEWSTATE': '/wEPDwUKMTM4NTE3OTkzOWRkP4hmXYtPPhcBjbupZdLOLfmeTK4=',
        '__VIEWSTATEGENERATOR': 'CA0B0334',
        '__EVENTVALIDATION': '/wEWAwK75ZuyDwLigPTXCQKU9f3vAheUenitfEuJ6eGUVe2GyFzb7HKC',
        'key': '{}'.format(md5),
        'jiemi': 'MD5解密'
    }
    url = "http://pmd5.com/"
    r = requests.post(url, headers=header, data=data)
    sd = r.content.decode('utf-8')
    esdf = BeautifulSoup(sd, 'html.parser')
    # The recovered plaintext (if any) is rendered inside <em> tags on the result page
    for l in esdf.find_all('em'):
        g = l.get_text()
        print('--------[*]PMD5 API--------')
        print(g)

if __name__ == '__main__':
    main()
How it works:
- Capture and analyze the POST request that pmd5.com's form submits
- Replay the form submission with requests
- Parse the returned HTML to extract the recovered plaintext
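To test the tool, you need a hash whose plaintext you already know. Python's standard hashlib is enough to generate one; the plaintext value here is arbitrary.

import hashlib

plaintext = 'admin888'   # arbitrary test value
digest = hashlib.md5(plaintext.encode('utf-8')).hexdigest()
print(digest)            # 32-character hex string to feed to the -m option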
5. Shodan API Tool
import optparse
import shodan

def main():
    usage = '[usage: -j Type what you want] [-i IP to search] [-s Todays camera equipment]'
    parser = optparse.OptionParser(usage)
    parser.add_option('-j', dest='jost', help='Type what you want')
    parser.add_option('-i', dest='host', help='IP to search')
    parser.add_option('-s', action='store_true', dest='query', help='Todays camera equipment')
    (options, args) = parser.parse_args()
    if options.jost:
        jost = options.jost
        Jost(jost)
    elif options.host:
        host = options.host
        Host(host)   # Host() and query() follow the same pattern as Jost(); only Jost() is shown here
    elif options.query:
        query()
    else:
        parser.print_help()
        exit()

def Jost(jost):
    SHODAN_API_KEY = 'YOUR_API_KEY'   # replace with your own Shodan API key
    api = shodan.Shodan(SHODAN_API_KEY)
    try:
        result = api.search('{}'.format(jost))
        print('[*]Results found: {}'.format(result['total']))
        for x in result['matches']:
            print('IP: {}'.format(x['ip_str']))
            print(x['data'])
            with open('shodan.txt', 'a') as p:
                p.write(x['ip_str'] + '\n')
                p.write(x['data'] + '\n')
    except shodan.APIError as e:
        print('[-]Error:', e)

if __name__ == '__main__':
    main()
Features:
- Requires registering on Shodan to obtain an API key
- Three query modes:
  - -j keyword search
  - -i IP lookup (sketched below)
  - -s query for weak-password cameras
- Results are saved to shodan.txt
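Only the keyword search (Jost) is shown above; the -i branch is meant to look up a single IP. A minimal sketch of that lookup based on the shodan library's Shodan.host() method, not the article's original code:

import shodan

SHODAN_API_KEY = 'YOUR_API_KEY'   # use your own key
api = shodan.Shodan(SHODAN_API_KEY)

def Host(ip):
    try:
        host = api.host(ip)                      # single-IP lookup
        print('[*]IP: {}'.format(host['ip_str']))
        print('[*]Organization: {}'.format(host.get('org', 'n/a')))
        for item in host['data']:                # one entry per open port/banner
            print('[*]Port: {}'.format(item['port']))
            print(item['data'])
    except shodan.APIError as e:
        print('[-]Error:', e)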
6. DNS Query Tool
from dnsknife.scanner import Scanner
import dnsknife
import optparse
import sys

def main():
    usage = "[-i Fast query] [-d DNS domain transmission vulnerability detection]"
    parser = optparse.OptionParser(usage)
    parser.add_option('-i', dest='Fastquery', help='Quickly check some dns records')
    parser.add_option('-d', dest='detection', help='Detects possible DNS transmission vulnerabilities')
    (options, args) = parser.parse_args()
    if options.Fastquery:
        Fastquery = options.Fastquery
        query(Fastquery)
    elif options.detection:
        detection = options.detection
        vulnerability(detection)   # zone-transfer check; not shown in this listing
    else:
        sys.exit()

def query(Fastquery):
    print('--------mx record--------')
    try:
        dns = dnsknife.Checker(Fastquery).mx()
        for x in dns:
            print(x)
    except Exception as c:
        print('[-]wrong reason:', c)
    print('--------txt record--------')
    try:
        dnstxt = dnsknife.Checker(Fastquery).txt()
        print(dnstxt)
    except Exception as g:
        print('[-]wrong reason:', g)

if __name__ == '__main__':
    main()
Notes:
- Requires the dnsknife module: pip install dnsknife (it may fail on Windows; running it on Linux is recommended)
- Two query modes:
  - Quick DNS record lookup (MX, TXT records)
  - DNS zone transfer vulnerability detection (see the dnspython sketch below)
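The zone-transfer check itself (the -d mode) is not shown above. As one common way to test it, here is a sketch using dnspython (pip install dnspython) rather than dnsknife; it is an illustrative alternative, not the article's implementation.

import dns.resolver
import dns.query
import dns.zone

def check_zone_transfer(domain):
    # Ask each authoritative name server for a full zone transfer (AXFR)
    for ns in dns.resolver.resolve(domain, 'NS'):
        ns_name = str(ns.target)
        ns_ip = str(dns.resolver.resolve(ns_name, 'A')[0])
        try:
            zone = dns.zone.from_xfr(dns.query.xfr(ns_ip, domain, timeout=5))
            print('[+]{} allows zone transfer! {} records leaked'.format(ns_name, len(zone.nodes)))
        except Exception as e:
            print('[-]{}: transfer refused or failed ({})'.format(ns_name, e))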
Recommended Reading
Python:
- Violent Python (《Python绝技》)
- Black Hat Python (《Python黑帽子》)
- Web Scraping with Python (《Python网络数据采集》)
- Python Network Programming (《Python网络编程》)
Security:
- The Web Application Hacker's Handbook (《黑客攻防技术宝典:Web篇》)
- The Browser Hacker's Handbook (《黑客攻防技术宝典:浏览器实战篇》)
Networking:
- Cisco Networking Academy Program textbooks, Vol. 1 & 2 (《思科网络技术学院教程》上、下)
Summary
This tutorial walked through building several practical Python security tools, from simple scrapers to API-driven lookups. They can help security researchers gather information and detect vulnerabilities more efficiently. When using them, please comply with applicable laws and regulations, and use these tools only for legitimate security testing and research.