代碼如下:
# -*- coding: utf-8 -*-
"""Batch web-scanning helper that drives xray / rad / goby from the CLI.

Usage: python3 scan.py 1-8  (see outprint() for the mode menu).
All tool and asset locations are hard-coded in globalPath(); rad and
xray are expected to sit in the same directory (Windows paths).
"""
import os
import time
import sys
import datetime


def globalPath():
    """Populate the module-level tool and asset path configuration."""
    global xrayPath    # xray executable
    global radPath     # rad executable
    global urlPath     # URL asset list, one target per line
    global domainPath  # domain asset list, one domain per line
    global xrayListen  # xray passive-proxy listen address
    global gobyPath    # folder containing goby .xlsx asset exports
    xrayPath = r"D:\sec\Tools\xray\xray\xray.exe"
    radPath = r"D:\sec\Tools\xray\xray\rad.exe"
    urlPath = r"D:\sec\Tools\xray\xray\batch_scan\urls.txt"
    domainPath = r"D:\sec\Tools\xray\xray\batch_scan\domains.txt"
    xrayListen = "127.0.0.1:7777"
    gobyPath = r"D:\sec\Tools\xray\xray\batch_scan\gobyAsset\\"


def getUrl(path):
    """Return the targets in *path* as a list, one per line, newlines stripped."""
    # fix: context manager closes the handle even if reading raises
    with open(path) as file:
        return [line.strip('\n') for line in file]


def makeReportName(index, target, ext):
    """Build a filesystem-safe report name such as ``1,www.example.com.html``.

    The scheme, slashes and newlines are removed and ``:`` becomes ``-`` so
    the target can be used as a Windows file name.  This logic was previously
    copy-pasted into every scan function.
    """
    safe = (target.replace('https://', '').replace('http://', '')
                  .replace('/', '').replace('\n', '')
                  .replace(':', '-').rstrip())
    return str(index) + ',' + safe + ext


def addFiles(pathName):
    """Create (if necessary) the dated report folder and return its path.

    Example result: D:\\xxxx\\batch_scan\\2020.11.11-scan_domains\\
    """
    filePath = sys.path[0] + "\\" + datetime.datetime.now().strftime('%Y.%m.%d-') + pathName
    try:
        os.mkdir(filePath)
    except OSError:
        # fix: was a bare ``except: pass`` — only swallow mkdir errors
        # (typically "folder already exists"), never NameError/KeyboardInterrupt
        pass
    return filePath


def _startXrayListener(filePath):
    """Launch xray's passive listener in a new console window (scan6/7/8)."""
    report = filePath + datetime.datetime.now().strftime('%H.%M-') + 'xray_proxy.html'
    cmd = r'{0} webscan --listen {1} --html-output {2}'.format(xrayPath, xrayListen, report)
    os.system(r"cmd /c start " + cmd)
    time.sleep(5)  # give the listener time to come up before crawling starts


def scan1():
    """Mode 1: xray basic-crawler scan of every URL in urls.txt."""
    urls = getUrl(urlPath)
    filePath = addFiles("(1)-scan_xray\\")
    for index, url in enumerate(urls, 1):
        name = makeReportName(index, url, '.html')
        cmd = r'{0} webscan --basic-crawler {1} --html-output {2}'.format(
            xrayPath, url.replace('\n', ''), filePath + name)
        os.system(cmd.replace('\n', ''))
        time.sleep(1)  # brief pause so consecutive xray runs do not collide


def scan2():
    """Mode 2: xray passive proxy scan (pair with burp for post-login scans)."""
    filePath = addFiles("(2)-scan_proxy\\")
    report = filePath + datetime.datetime.now().strftime('%H.%M-') + 'xray_proxy.html'
    cmd = r'{0} webscan --listen {1} --html-output {2}'.format(xrayPath, xrayListen, report)
    os.system(cmd.replace('\n', ''))


def scan3():
    """Mode 3: xray subdomain brute force for every domain in domains.txt."""
    urls = getUrl(domainPath)
    filePath = addFiles("(3)-scan_subdomain\\")
    for index, url in enumerate(urls, 1):
        name = makeReportName(index, url, '.txt')  # e.g. 1,www.baidu.com.txt
        cmd = r'{0} subdomain --target {1} --text-output {2}'.format(
            xrayPath, url.replace('\n', ''), filePath + name)
        os.system(cmd.replace('\n', ''))
        time.sleep(1)


def scan4():
    """Mode 4: rad crawl of every URL in urls.txt."""
    urls = getUrl(urlPath)
    filePath = addFiles("(4)-scan_rad\\")
    for index, url in enumerate(urls, 1):
        name = makeReportName(index, url, '.txt')
        cmd = r'{0} -t {1} -text-output {2}'.format(
            radPath, url.replace('\n', ''), filePath + name)
        os.system(cmd.replace('\n', ''))
        time.sleep(1)


def scan5():
    """Mode 5: xray browser-crawler (rad-powered) scan of every URL."""
    urls = getUrl(urlPath)
    filePath = addFiles("(5)-scan_rad_xray\\")
    for index, url in enumerate(urls, 1):
        print(str(index) + '、' + url)  # progress indicator
        name = makeReportName(index, url, '.html')
        cmd = r'{0} webscan --browser-crawler {1} --html-output {2}'.format(
            xrayPath, url.replace('\n', ''), filePath + name)
        os.system(cmd.replace('\n', ''))
        time.sleep(1)


def scan6():
    """Mode 6: rad crawls through burp (8080); burp forwards to xray (7777)."""
    urls = getUrl(urlPath)
    filePath = addFiles("(6)-scan_rad_burp_xray\\")
    _startXrayListener(filePath)
    for index, url in enumerate(urls, 1):
        name = makeReportName(index, url, '.txt')
        radcmd = r'{0} -t {1} --http-proxy 127.0.0.1:8080 -text-output {2}'.format(
            radPath, url.replace('\n', ''), filePath + name)
        os.system(radcmd.replace('\n', ''))
        time.sleep(1)


def scan7():
    """Mode 7: brute-force subdomains, then rad-crawl them through burp/xray."""
    urls = getUrl(domainPath)
    filePath = addFiles("(7)-scan_subdomain_rad_burp_xray\\")
    domainList = []  # subdomains gathered from all brute-force reports
    for index, url in enumerate(urls, 1):
        name = makeReportName(index, url, '_subdomains.txt')
        cmd = r'{0} subdomain --target {1} --text-output {2}'.format(
            xrayPath, url.replace('\n', ''), filePath + name)
        os.system(cmd.replace('\n', ''))
        time.sleep(1)
        # fix: the original called ``r.close`` without parentheses, leaking
        # the handle; the context manager closes it reliably
        with open(r'{0}'.format(filePath + name)) as report:
            for row in report:
                # first comma-separated field of xray's text output is the host
                domainList.append(row.split(',')[0])
    _startXrayListener(filePath)
    for index, domain in enumerate(domainList, 1):
        name = makeReportName(index, domain, '.txt')
        radcmd = r'{0} -t {1} --http-proxy 127.0.0.1:8080 -text-output {2}'.format(
            radPath, domain.replace('\n', ''), filePath + name)
        os.system(radcmd.replace('\n', ''))
        time.sleep(1)


def scan8():
    """Mode 8: parse goby .xlsx exports into URLs, then rad/burp/xray scan."""
    import openpyxl  # third-party; imported lazily so modes 1-7 work without it
    gobyXlsxs = []  # active worksheet of every goby asset workbook
    for entry in os.listdir(gobyPath):
        if (gobyPath + entry).endswith(".xlsx"):
            gobyXlsxs.append(openpyxl.load_workbook(gobyPath + entry).active)
    urls = []  # assembled http(s)://host[:port] targets
    for gobyXlsx in gobyXlsxs:
        # column A = host, B = comma-separated ports, C = matching protocols
        # (assumes B and C are non-empty strings of equal field count — TODO confirm)
        for rowCells in gobyXlsx['A2:A' + str(gobyXlsx.max_row)]:
            for cell in rowCells:
                port = gobyXlsx['B' + str(cell.row)].value.split(',')
                proto = gobyXlsx['C' + str(cell.row)].value.split(',')
                for index in range(len(port)):
                    if port[index] == '443':
                        urls.append('https://' + cell.value)
                    elif proto[index] == 'web' or proto[index] == 'http':
                        urls.append('http://' + cell.value + ':' + port[index])
    # fix: the original called ``f.close`` without parentheses, so the asset
    # file was only flushed at interpreter exit; ``with`` closes it here
    with open(gobyPath + 'goby_asset.txt', 'w') as f:
        for asset in urls:
            f.write(asset + '\n')
    # fix: folder label was "(7)" in the original, clashing with scan7's number
    filePath = addFiles("(8)-scan_goby_rad_burp_xray\\")
    _startXrayListener(filePath)
    for index, url in enumerate(urls, 1):
        name = makeReportName(index, url, '.txt')
        radcmd = r'{0} -t {1} --http-proxy 127.0.0.1:8080 -text-output {2}'.format(
            radPath, url.replace('\n', ''), filePath + name)
        os.system(radcmd.replace('\n', ''))
        time.sleep(1)


def start(argv):
    """Dispatch menu choice *argv* ('1'-'8') to the matching scan mode."""
    scans = {'1': scan1, '2': scan2, '3': scan3, '4': scan4,
             '5': scan5, '6': scan6, '7': scan7, '8': scan8}
    if argv in scans:
        scans[argv]()
    else:
        print(" 參數錯誤!!! python3 scan.py 1-8")


def outprint():
    """Print the scan-mode menu."""
    print("""————————————————————————————————————
 1 xray基礎掃描

 2 xray代理掃描

 3 xray子域名爆破

 4 rad爬蟲

 5 rad_xray爬蟲掃描

 6 rad_burp_xray聯動掃描

 7 subdomain_rad_burp_xray聯動掃描

 8 goby_rad_burp_xray聯動掃描
————————————————————————————————————""")


if __name__ == "__main__":
    globalPath()
    if len(sys.argv) == 2:
        outprint()
        start(sys.argv[1])
    else:
        outprint()
        print(' 格式錯誤!!! python3 scan.py 1-8')
- 工具下載鏈接
Xray安裝使用說明:https://docs.xray.cool/#/tutorial/introduce
Rad安裝使用說明:https://github.com/chaitin/rad
Goby安裝使用說明:https://gobies.org/
Burp 敏感信息插件:https://github.com/ScriptKid-Beta/Unexpected_information/releases
Burp Shiro掃描插件:https://github.com/amad3u4/ShiroScanner
Burp Struts掃描插件:https://github.com/prakharathreya/Struts2-RCE
Burp Fastjson掃描插件:https://github.com/p1g3/Fastjson-Scanner
Burp J2EE掃描插件:https://github.com/PortSwigger/j2ee-scan
- 腳本功能介紹
1、xray批量基礎掃描:python3 scan.py 1
2、xray代理掃描:python3 scan.py 2
3、xray批量子域名爆破:python3 scan.py 3
4、rad批量爬蟲:python3 scan.py 4
5、rad_xray批量爬蟲掃描:python3 scan.py 5
6、rad_burp_xray批量掃描:python3 scan.py 6
7、subdomain_rad_burp_xray批量掃描:python3 scan.py 7
8、goby_rad_burp_xray批量掃描:python3 scan.py 8
- 注意事項
1、rad、xray需放在同一目錄下
2、解決批量掃描時命令行卡住BUG
右鍵Powershell屬性-選項-取消勾選’快速編輯模式’
3、修改xray配置文件config.yaml(如沒有運行一次工具會自動生成),掃描子域名時只會顯示web存活的域名
4、配置scan.py文件路徑,填寫對應工具絕對路徑即可
5、urls.txt文件格式
6、domains.txt文件內容格式
- 使用方法
一、Xray批量基礎掃描
python3 scan.py 1
Xray的基礎頁面爬蟲掃描,需把所有目標url按行寫入urls.txt,腳本執行結果等同於:
xray.exe webscan --basic-crawler http://xxxx.com --html-output xxxx.html
二、Xray代理掃描
python3 scan.py 2
Xray的代理掃描,可以與burp聯動進行登陸后台掃描,腳本執行結果等同於
xray.exe webscan --listen 127.0.0.1:7777 --html-output xxxx.html
三、Xray批量子域名爆破
python3 scan.py 3
Xray自帶subdomain子域名爆破功能,用於爆破單個或多個子域名信息,需把所有域名按行寫入domains.txt中,執行結果等同於:
xray.exe subdomain --target xxxx.com --text-output xxxx.txt
四、Rad批量爬蟲
python3 scan.py 4
用於批量爬取所有目標站點的鏈接信息,需把所有目標url按行寫入urls.txt,執行結果等同於:
rad.exe -t xxxx.com -text-output xxxx.txt
五、Rad_Xray批量爬蟲掃描
python3 scan.py 5
Xray高級版自帶聯動rad與xray掃描功能,需把所有目標url按行寫入urls.txt,執行結果等同於:
xray.exe webscan --browser-crawler xxxx.com --html-output xxxx.html
六、Rad_Burp_Xray批量掃描
python3 scan.py 6
rad、xray聯動burp進行主動掃描,需把所有目標url按行寫入urls.txt,執行流程:
1.rad批量爬取urls.txt所有目標並將流量發送到burp的8080端口
2.burp將收到的流量發送到xray的7777端口,並使用擴展插件(敏感信息抓取、shiro、weblogic、fastjson插件等等)對爬取到的流量進行掃描,此處需配置Burp-User options-Upstream Proxy Servers流量轉發ip及端口
3.xray開啟7777端口對發送來的流量進行漏洞掃描,此功能會有兩個窗口,一個為rad爬蟲窗口,一個為xray監聽窗口
七、Subdomain_rad_burp_xray批量掃描
python3 scan.py 7
需要把所有根域名信息按行寫入domains.txt
執行流程:
1.通過xray的subdomain爆破domains.txt所有目標子域名
2.rad爬取爆破出來的所有子域名並將流量發送到burp
3.burp將收到的流量發送到xray的7777端口,並使用擴展插件(敏感信息抓取、shiro、weblogic、fastjson插件等等)對爬取到的流量進行掃描,此處需配置Burp-User options-Upstream Proxy Servers流量轉發ip及端口127.0.0.1:7777
4.xray收到burp發送來的流量進行漏洞掃描
八、Goby_Rad_Burp_Xray批量掃描
python3 scan.py 8
使用Goby對所有目標站點進行資產探測,pip install openpyxl使用openpyxl庫解析goby導出的xlsx文件,再使用rad、burp、xray對所有資產進行掃描,執行流程:
1. 使用Goby進行資產探測后,導出資產excel表到gobyAsset文件夾
2. 將導出的goby資產全部解析成http、https/IP/端口的格式
3. 使用rad對解析后的所有資產進行爬蟲,並將流量發送到burp的8080端口
4. burp將收到的流量發送到xray的7777端口,並使用擴展插件(敏感信息抓取、shiro、weblogic、fastjson插件等等)對爬取到的流量進行掃描,此處需配置Burp-User options-Upstream Proxy Servers流量轉發ip及端口127.0.0.1:7777
5. xray收到burp發送來的流量進行漏洞掃描