最近在弄一些爬虫和扫描相关的东西，扫描部分交给 sqlmapapi 来进行。目前这方面的资料不是很多，但还是可以找到一些。
《使用sqlmapapi.py批量化扫描实践》http://drops.wooyun.org/tips/6653
看看他所封装的sqlmapapi的类
#!/usr/bin/python
#-*-coding:utf-8-*-
import requests
import time
import json
class AutoSqli(object):
    """Client wrapper for talking to a running sqlmapapi server.

    Drives one scan task over the server's HTTP REST interface
    (task creation/deletion, scan start, status polling).
    By Manning
    """
def __init__(self, server='', target='',data = '',referer = '',cookie = ''):
super(AutoSqli, self).__init__()
self.server = server
if self.server[-1] != '/':
self.server = self.server + '/'
self.target = target
self.taskid = ''
self.engineid = ''
self.status = ''
self.data = data
self.referer = referer
self.cookie = cookie
self.start_time = time.time()
#新建扫描任务
def task_new(self):
self.taskid = json.loads(
requests.get(self.server + 'task/new').text)['taskid']
print 'Created new task: ' + self.taskid
#得到taskid,根据这个taskid来进行其他的
if len(self.taskid) > 0:
return True
return False
#删除扫描任务
def task_delete(self):
if json.loads(requests.get(self.server + 'task/' + self.taskid + '/delete').text)['success']:
print '[%s] Deleted task' % (self.taskid)
return True
return False
#扫描任务开始
def scan_start(self):
headers = {'Content-Type': 'application/json'}
#需要扫描的地址
payload = {'url': self.target}