# 这是一款自己开发的fofa爬取数据小软件,打包命令注释里有!
# 主要就是将自己的邮箱和key配置好即可!
# 城市这些,我只放了中国和美国的,有需要的自己再进行扩展!
# 打包好后,只需要将语法放进去,即可自己爬取并保存数据到桌面!
import os
import threading
from tkinter import *
from tkinter import messagebox
import requests
import base64
# FOFA search endpoint; format placeholders: account email, API key,
# base64-encoded query, page number, page size.
API_url = r"https://fofa.info/api/v1/search/all?email={}&key={}&qbase64={}&page={}&size={}"
# Fill in your own FOFA account email and API key before use.
email = '邮箱'
key = 'key'
# Build command: pyinstaller -Fw .\fofav_1.py  (-Fw hides the console window; -F keeps it)
# Region lists: a country-level CN/US query is re-run once per entry with a
# region filter appended (see loop_region) -- presumably to work around
# per-query result limits; verify against the FOFA API docs.
# The leading '' entry performs one search with an empty region filter.
cn_region = ['', 'Beijing', 'Zhejiang', 'Guangdong', 'Shandong', 'Shanghai', 'Sichuan', 'Jiangsu', 'Fujian', 'Anhui',
             'Henan', 'Hubei', 'Liaoning', 'Hunan', 'Shaanxi', 'Chongqing', 'Hebei', 'Jilin', 'Guizhou', 'Yunnan',
             'Ningxia Hui Autonomous Region', 'Jiangxi', 'Heilongjiang', 'Tianjin', 'Guangxi', 'Qinghai',
             'Inner Mongolia Autonomous Region', 'Shanxi', 'Hainan', 'Gansu', 'Xinjiang Uyghur Autonomous Region',
             'Central and Western District', 'Sai Kung District', 'Kowloon City', 'Tai Po District', 'Sha Tin',
             'Tsuen Wan District', 'Tuen Mun', 'Yau Tsim Mong', 'Eastern', 'Islands District', 'Kwun Tong', 'Wan Chai',
             'Wong Tai Sin', 'Yuen Long District', 'TW', 'MO']
us_region = ['', 'Illinois', 'California', 'Virginia', 'Oregon', 'Iowa', 'New Jersey', 'Texas', 'Washington',
             'North Carolina', 'Ohio', 'Georgia', 'Florida', 'New York', 'Missouri', 'South Carolina', 'Kansas',
             'Arizona', 'Colorado', 'Pennsylvania', 'Wisconsin', 'Maine', 'Michigan', 'Nevada', 'Indiana', 'Oklahoma',
             'Utah', 'Massachusetts', 'Nebraska', 'Minnesota', 'Connecticut', 'District of Columbia', 'Maryland',
             'Alabama', 'New Mexico', 'Kentucky', 'Mississippi', 'Tennessee', 'South Dakota', 'Louisiana', 'Arkansas',
             'Delaware', 'Idaho', 'Montana', 'New Hampshire', 'Wyoming', 'Hawaii', 'North Dakota', 'West Virginia',
             'Alaska']
def get_data():
    """Launch the crawl on a background thread so the Tk UI stays responsive."""
    worker = threading.Thread(target=getFFData)
    worker.start()
# Worker entry point: read query parameters from the UI and dispatch the crawl.
def getFFData():
    regu = reg_txt.get()    # raw FOFA query string
    siz = size_text.get()   # page size
    pag = page_text.get()   # number of pages to fetch
    # Use os.sep instead of the original hard-coded r'\\' so the output path
    # (desktop_path() + file_string) also works on non-Windows systems.
    file_string = os.sep + file_txt.get() + '.txt'
    # For CN/US country-wide queries, iterate per region to collect more data.
    if 'country="CN"' in regu:
        loop_region(cn_region, regu, siz, pag, file_string)
    elif 'country="US"' in regu:
        loop_region(us_region, regu, siz, pag, file_string)
    else:
        do_search(regu, siz, pag, file_string)
    # NOTE(review): this runs on a worker thread; calling Tk (messagebox) from
    # a non-main thread is not guaranteed safe -- confirm on target platform.
    messagebox.showwarning("通知!", "爬取完成!")
def loop_region(region_array, regu, siz, pag, file_string):
    """Run the base query once per region, appending a region filter each time.

    The first entry of region_array is '' (an empty region filter), so the
    country-wide query itself is also searched once.
    """
    for name in region_array:
        filtered = f'{regu}&& region="{name}"'
        do_search(filtered, siz, pag, file_string)
def do_search(regu, siz, pag, file_string):
    """Run one FOFA query and append each result's first field to the output file.

    regu: full FOFA query string.
    siz: page size (string from the UI).
    pag: number of pages to fetch (string, converted with int()).
    file_string: path suffix (separator + filename) appended to the desktop path.

    Progress and warnings are echoed into the `txt` log widget; paging stops
    early when the API reports an error or returns an empty page.
    """
    txt.insert(INSERT, regu + "\n")
    txt.see(END)
    # FOFA requires the query to be base64-encoded in the URL.
    flag = base64.b64encode(regu.encode()).decode()
    out_path = desktop_path() + file_string  # hoisted: invariant across pages
    for i in range(1, int(pag) + 1):
        try:
            res = requests.get(API_url.format(email, key, flag, str(i), siz))
            payload = res.json()  # parse the body once instead of twice
            if payload["error"]:
                txt.insert(INSERT, "警告!,没有数据!" + "\n")
                txt.see(END)
                break
            results = payload["results"]
            if len(results) == 0:
                # The API returned fewer pages than requested; stop here.
                txt.insert(INSERT, "警告!,没有这么多页数据!当前空页:" + str(i) + "\n")
                txt.see(END)
                break
            for result in results:
                write_data(out_path, result[0] + "\n")
                txt.insert(INSERT, str(result[0]) + "\n")
                txt.see(END)
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed SystemExit
            # and KeyboardInterrupt); network/JSON failures are logged to the UI
            # and the next page is still attempted, as before.
            txt.insert(INSERT, "警告!,爬取出现错误!" + "\n")
            txt.see(END)
def desktop_path():
    """Return the path of the current user's Desktop directory."""
    home = os.path.expanduser('~')
    return os.path.join(home, "Desktop")
def write_data(path, data):
    """Append *data* to the UTF-8 text file at *path* (created if missing)."""
    with open(path, mode='a', encoding='utf-8') as out:
        out.write(data)
def reset():
    """Clear the query entry, the page entry and the log text box."""
    reg_txt.delete(0, END)
    page_text.delete(0, END)
    # "1.0" (line 1, char 0) is the canonical Text start index; the original
    # passed the float 0.1, which only worked because Tk coerces it to "0.1".
    txt.delete("1.0", END)
# --- Tk UI construction (runs at import time) ---
root = Tk()  # main window
root.wm_title("FOFA_V1.0爬虫")  # window title
# window.config(background="#00FFCC")  # background colour (disabled)
root.geometry('500x240')
# Search-rule label + entry.
reg = Label(root, text='请输入搜索规则')
reg.place(relx=0, rely=0, relwidth=0.7, height=25)
reg_txt = Entry(root)
reg_txt.place(relx=0, rely=0.1, relwidth=0.7, height=25)
# Page-size label + entry.
size = Label(root, text='请输入长度,默认100,最大10000')
size.place(relx=0, rely=0.2, relwidth=0.7, relheight=0.1)
size_text = Entry(root)
size_text.place(relx=0, rely=0.3, relwidth=0.7, relheight=0.1)
# Page-count label + entry.
page = Label(root, text='请输入爬取页数')
page.place(relx=0, rely=0.4, relwidth=0.7, relheight=0.1)
page_text = Entry(root)
page_text.place(relx=0, rely=0.5, relwidth=0.7, relheight=0.1)
# Output-filename label + entry (saved to the desktop as <name>.txt).
file_ = Label(root, text='保存文本名称')
file_.place(relx=0, rely=0.6, relwidth=0.7, relheight=0.1)
file_txt = Entry(root)
file_txt.place(relx=0, rely=0.7, relwidth=0.7, relheight=0.1)
# Action buttons: start crawling / reset the form.
btn1 = Button(root, text='开始爬取', command=get_data)
btn1.place(relx=0, rely=0.8, relwidth=0.3, relheight=0.1)
btn2 = Button(root, text='重置', command=reset)
btn2.place(relx=0.5, rely=0.8, relwidth=0.3, relheight=0.1)
# Log area with a vertical scrollbar.
txt = Text(wrap=WORD)
scroll = Scrollbar()
# Dock the scrollbar on the right edge, filling the vertical direction.
scroll.pack(side=RIGHT, fill=Y)
# Link the two widgets: scrollbar drives the text view and vice versa.
scroll.config(command=txt.yview)
txt.config(yscrollcommand=scroll.set)
txt.place(rely=0.9, relheight=0.4)
if __name__ == '__main__':
    root.mainloop()