A tried-and-tested script for batch-adding AWVS scan tasks


#!/usr/bin/env python
# -*- coding:utf-8 -*-
# __author__ : "Ca1m"
# Time : 2020/11/14 15:21


import requests
import json
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


apikey = '1986ad8c0a5b3df4d7028d5f3c06e936c61d7af90a46243638a6adb24b99f61ee'  # AWVS API key (replace with your own)
headers = {'Content-Type': 'application/json', 'X-Auth': apikey}


def addTask(url, target):
    try:
        url = ''.join((url, '/api/v1/targets/add'))
        data = {"targets": [{"address": target, "description": ""}], "groups": []}
        r = requests.post(url, headers=headers, data=json.dumps(data), timeout=30, verify=False)
        result = json.loads(r.content.decode())
        return result['targets'][0]['target_id']
    except Exception as e:
        # Return None on failure so the caller's "if target_id:" check works;
        # the original returned the exception object, which is always truthy.
        print(e)
        return None


def scan(url, target, Crawl, user_agent, profile_id, proxy_address, proxy_port):
    scanUrl = ''.join((url, '/api/v1/scans'))
    target_id = addTask(url, target)

    if target_id:
        data = {"target_id": target_id, "profile_id": profile_id, "incremental": False,
                "schedule": {"disable": False, "start_date": None, "time_sensitive": False}}
        try:
            configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent)
            response = requests.post(scanUrl, data=json.dumps(data), headers=headers, timeout=30, verify=False)
            result = json.loads(response.content)
            return result['target_id']
        except Exception as e:
            print(e)


def configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent):
    configuration_url = ''.join((url, '/api/v1/targets/{0}/configuration'.format(target_id)))
    data = {"scan_speed": "fast", "login": {"kind": "none"}, "ssh_credentials": {"kind": "none"},
            "sensor": False, "user_agent": user_agent, "case_sensitive": "auto",
            "limit_crawler_scope": True, "excluded_paths": [], "authentication": {"enabled": False},
            "proxy": {"enabled": Crawl, "protocol": "http", "address": proxy_address, "port": proxy_port},
            "technologies": [], "custom_headers": [], "custom_cookies": [], "debug": False,
            "client_certificate_password": "", "issue_tracker_id": "", "excluded_hours_id": ""}
    requests.patch(url=configuration_url, data=json.dumps(data), headers=headers, timeout=30, verify=False)

def main():
    Crawl = False
    proxy_address = '127.0.0.1'
    proxy_port = '8888'
    awvs_url = 'https://192.168.21.130:13443'  # AWVS address (no trailing slash; the API paths above start with /)
    with open('url.txt', 'r', encoding='utf-8') as f:
        targets = f.readlines()
    profile_id = "11111111-1111-1111-1111-111111111111"  # Full Scan
    user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.21 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.21"  # default scan User-Agent
    if Crawl:
        profile_id = "11111111-1111-1111-1111-111111111117"  # Crawl Only (for pairing with X-RAY)
    for target in targets:
        target = target.strip()
        if scan(awvs_url, target, Crawl, user_agent, profile_id, proxy_address, int(proxy_port)):
            print("{0} added successfully".format(target))


if __name__ == '__main__':
    main()
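
Before running the batch, it can be worth a quick sanity check that awvs_url and apikey are accepted. A minimal sketch, not part of the original script; it assumes GET /api/v1/targets, the listing counterpart of the /api/v1/targets/add endpoint used above, is available on your AWVS instance:

import requests

requests.packages.urllib3.disable_warnings()

awvs_url = 'https://192.168.21.130:13443'  # same AWVS instance as in main()
apikey = '...'  # your own API key

# A 200 response confirms the URL is reachable and the key is accepted.
r = requests.get(awvs_url + '/api/v1/targets', headers={'X-Auth': apikey}, timeout=10, verify=False)
print(r.status_code)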

Configuration

  • Replace apikey with your own API key
  • Put the URLs to be scanned into url.txt in the same directory as the script (one per line; see the example after this list)
  • Change awvs_url to your own AWVS address
  • To use the scanner together with X-RAY, set Crawl to True
  • Change proxy_address to your proxy address
  • Change proxy_port to your proxy port
  • To use another scan type, change profile_id (see the table below)
  • To change the User-Agent header, edit user_agent
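
For reference, url.txt is read line by line, so it should contain one target address per line. The hosts below are placeholders:

https://example.com
http://192.168.21.150:8080
https://test.example.org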

 

AWVS 13 scan types (scanTypes)

Type                                     profile_id
Full Scan                                11111111-1111-1111-1111-111111111111
High Risk Vulnerabilities                11111111-1111-1111-1111-111111111112
SQL Injection Vulnerabilities            11111111-1111-1111-1111-111111111113
Weak Passwords                           11111111-1111-1111-1111-111111111115
Cross-site Scripting Vulnerabilities     11111111-1111-1111-1111-111111111116
Crawl Only                               11111111-1111-1111-1111-111111111117
Malware Scan                             11111111-1111-1111-1111-111111111120
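
If you switch scan types often, a small lookup built from the table above keeps the choice in one place. A sketch; SCAN_PROFILES and the lookup are not part of the original script:

# Map scan-type names from the table to their profile_id values.
SCAN_PROFILES = {
    'Full Scan': '11111111-1111-1111-1111-111111111111',
    'High Risk Vulnerabilities': '11111111-1111-1111-1111-111111111112',
    'SQL Injection Vulnerabilities': '11111111-1111-1111-1111-111111111113',
    'Weak Passwords': '11111111-1111-1111-1111-111111111115',
    'Cross-site Scripting Vulnerabilities': '11111111-1111-1111-1111-111111111116',
    'Crawl Only': '11111111-1111-1111-1111-111111111117',
    'Malware Scan': '11111111-1111-1111-1111-111111111120',
}

# In main(), pick a profile by name instead of hard-coding the UUID:
profile_id = SCAN_PROFILES['Full Scan']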

 

 

Original script by 子傑: https://www.imzzj.com/2020/05/18/shi-yong-python-gei-awvs13-pi-liang-tian-jia-ren-wu.html

