• 实测好用的awvs批量添加任务脚本


    #! /usr/bin/env python  
    # -*- coding:utf-8 -*-  
    # __author__ : "Ca1m" 
    # Time : 2020/11/14 15:21
    
    
    import requests
    import json
    from requests.packages.urllib3.exceptions import InsecureRequestWarning
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    
    
    apikey = '1986ad8c0a5b3df4d7028d5f3c06e936c61d7af90a46243638a6adb24b99f61ee'  # AWVS API key, sent as the X-Auth header; replace with your own
    # Common headers for every AWVS API request (JSON body + API-key auth).
    headers = {'Content-Type': 'application/json',"X-Auth": apikey}
    
    
    def addTask(url, target):
        """Register `target` on the AWVS server and return its target_id.

        Args:
            url: Base URL of the AWVS server (with or without a trailing '/').
            target: Address (URL/host) to add as a scan target.

        Returns:
            The new target's id string on success, or None on any failure.
        """
        try:
            # rstrip('/') prevents a double slash when the base URL ends with '/'
            api = url.rstrip('/') + '/api/v1/targets/add'
            data = {"targets": [{"address": target, "description": ""}], "groups": []}
            r = requests.post(api, headers=headers, data=json.dumps(data), timeout=30, verify=False)
            result = json.loads(r.content.decode())
            return result['targets'][0]['target_id']
        except Exception as e:
            # BUG FIX: the original `return e` handed back the exception object,
            # which is truthy, so callers checking `if target_id:` mistook
            # failures for success. Report the error and return None instead.
            print(e)
            return None
    def scan(url, target, Crawl, user_agent, profile_id, proxy_address, proxy_port):
        """Add `target` to AWVS, apply its configuration, and launch a scan.

        Args:
            url: Base URL of the AWVS server.
            target: Address to scan.
            Crawl: Whether to enable the HTTP proxy (e.g. to chain with X-RAY).
            user_agent: User-Agent string the scanner should use.
            profile_id: AWVS scan profile id (scan type).
            proxy_address: Proxy host used when Crawl is True.
            proxy_port: Proxy port used when Crawl is True.

        Returns:
            The target_id string on success, or None on any failure.
        """
        scanUrl = url.rstrip('/') + '/api/v1/scans'  # avoid '//' with trailing-slash base URLs
        target_id = addTask(url, target)

        # addTask may fail; with the original code it could even return an
        # exception object (truthy), so check for a real id string explicitly.
        if not isinstance(target_id, str):
            return None
        data = {"target_id": target_id, "profile_id": profile_id, "incremental": False, "schedule": {"disable": False, "start_date": None, "time_sensitive": False}}
        try:
            # Configuration must be applied before the scan is started.
            configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent)
            response = requests.post(scanUrl, data=json.dumps(data), headers=headers, timeout=30, verify=False)
            result = json.loads(response.content)
            return result['target_id']
        except Exception as e:
            print(e)
            return None
    
    def configuration(url, target_id, proxy_address, proxy_port, Crawl, user_agent):
        """PATCH the scan settings of an existing AWVS target.

        The proxy block is enabled when `Crawl` is True, which lets the scan
        traffic be forwarded through an external proxy (e.g. X-RAY).
        """
        endpoint = url + '/api/v1/targets/{0}/configuration'.format(target_id)
        settings = {
            "scan_speed": "fast",
            "login": {"kind": "none"},
            "ssh_credentials": {"kind": "none"},
            "sensor": False,
            "user_agent": user_agent,
            "case_sensitive": "auto",
            "limit_crawler_scope": True,
            "excluded_paths": [],
            "authentication": {"enabled": False},
            "proxy": {"enabled": Crawl, "protocol": "http", "address": proxy_address, "port": proxy_port},
            "technologies": [],
            "custom_headers": [],
            "custom_cookies": [],
            "debug": False,
            "client_certificate_password": "",
            "issue_tracker_id": "",
            "excluded_hours_id": "",
        }
        requests.patch(url=endpoint, data=json.dumps(settings), headers=headers, timeout=30, verify=False)
    def main():
        """Read targets from url.txt and submit each one as an AWVS scan task."""
        Crawl = False                 # True: route traffic through the proxy (e.g. to X-RAY)
        proxy_address = '127.0.0.1'   # proxy host used when Crawl is True
        proxy_port = '8888'           # proxy port used when Crawl is True
        awvs_url = 'https://192.168.21.130:13443/'  # AWVS base URL
        with open('url.txt', 'r', encoding='utf-8') as f:
            targets = f.readlines()
        profile_id = "11111111-1111-1111-1111-111111111111"  # Full Scan profile
        user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.21 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.21"  # default scan User-Agent
        if Crawl:
            # When chaining with an external scanner, use the Crawl Only profile.
            profile_id = "11111111-1111-1111-1111-111111111117"
        for target in targets:
            target = target.strip()
            if not target:
                continue  # robustness: skip blank lines (e.g. trailing newline in url.txt)
            if scan(awvs_url, target, Crawl, user_agent, profile_id, proxy_address, int(proxy_port)):
                print("{0} 添加成功".format(target))

    if __name__ == '__main__':
        main()

    配置

    • 将apikey替换为自己的
    • 将需要扫描的url放入到脚本同一目录下的url.txt
    • 将awvs_url改为自己awvs地址
    • 如果需要和X-RAY配合使用可自行修改,将 Crawl 改为 True
    • proxy_address 改为代理地址
    • proxy_port 改为代理端口
    • 如果还想使用其他扫描类型可以自行修改profile_id
    • 如果想修改UA头自行修改user_agent

    AWVS13扫描类型scanTypes

    类型 / profile_id
    Full Scan 11111111-1111-1111-1111-111111111111
    High Risk Vulnerabilities 11111111-1111-1111-1111-111111111112
    SQL Injection Vulnerabilities 11111111-1111-1111-1111-111111111113
    Weak Passwords 11111111-1111-1111-1111-111111111115
    Cross-site Scripting Vulnerabilities 11111111-1111-1111-1111-111111111116
    Crawl Only 11111111-1111-1111-1111-111111111117
    Malware Scan 11111111-1111-1111-1111-111111111120

    脚本原作:子杰。https://www.imzzj.com/2020/05/18/shi-yong-python-gei-awvs13-pi-liang-tian-jia-ren-wu.html

  • 相关阅读:
    JSP(7)—EL和JSTL
    JSP(6)—JavaBean及案例
    JSP(5)—Session的创建以及简单使用
    JSP(4)—Cookie创建及简单案例(自动登录)
    JSP(3)—Cookie和Session
    JSP(2)—绝对路径与相对路径、配置Servlet与Servlet注解
    JSP(1)—基础知识
    Dockerfile RUN mkdir xxx 的时候报了Permission denied
    摘抄:Solr和ElasticSearch的区别
    POST请求反向代理设置
  • 原文地址:https://www.cnblogs.com/ly584521/p/13973431.html
Copyright © 2020-2023  润新知