這類反爬比起前篇講的加速樂簡單很多,筆者遇到的網站的cookie由兩個參數組成,即從返回的js中,將這兩個cookie參數找出。
如下圖,是該網站返回內容,一眼可以看到document.cookie,熟悉的應該都知道,這段js可以生成我們所需要的cookie。
將代碼復制到WebStorm中,新建html文件,將其格式化,並將JS代碼提取出來,並對其進行分析清洗(去除無效參數函數等)
改動前js:
// Original (minified) challenge JS exactly as returned by the site: an IIFE that
// writes two cookies via document.cookie — k(i, o) sets the first, k(r, u + m)
// the second — then reloads the page.  Kept verbatim as the "before" reference.
!function () { var o = "00000000ba3dc2781e265cb031f086a4c2e00e2c4a686a4c2e00e2c4a688"; function k(e, t) { var o = new Date; o.setTime(o.getTime() + 864e5), document.cookie = e + "=" + escape(t) + ";expires=" + o.toGMTString() + ";path=/" } function e(e, t) { return parseInt(e + "", t).toString(16) } function t() { var e = window.OfflineAudioContext || window.webkitOfflineAudioContext; if (null == e) return "nt"; var t = new e(1, 44100, 44100), n = t.createOscillator(), o; n.type = "triangle"; o = t.createDynamicsCompressor(); void 0 !== o.ratio && "function" == typeof o.ratio.setValueAtTime && o.ratio.setValueAtTime(12, t.currentTime), n.connect(o), o.connect(t.destination), n.start(0), t.startRendering(), t.oncomplete = function () { var e; try { e = 1838.1212, n.disconnect(), o.disconnect() } catch (t) { return t } return e } } var n, a, l, d, u, x, i = "5df3dc4b7aaea8a", r = "VudF9fX19lEVENT"; k(i, o); l = document.documentMode || "nt" !== t() ? "1" : "0", d = "nt" === (window.screen.colorDepth || "nt") ? "0" : "1", u = o.slice(o.length - 4), a = e(l + d + "11", 2) + "fffffff"; for (var s = o.slice(o.length - 8), m = "", w = 0; 8 > w; w++) m += e("0x" + a.charAt(w) ^ "0x" + s.charAt(w), 10); k(r, u + m), window.location.reload() }()
分析時先整體看一遍,再看情況打斷點。從下圖幾行代碼,可以看出其是生成cookie的關鍵所在。第一行很明顯是告訴我們兩個cookie參數的鍵名是什么。
k(i, o);該行代碼很明顯是第一個參數的生成過程,此時可以在k函數處打斷點,通過調試發現調用前后o的值不變,故第一個參數就是o,我們可以通過正則
來提取。(在調試的過程中會發現,document is not defined錯誤,此時補上document={};即可)
第三行代碼看起來挺復雜,但是我們不用管太多,打斷點調試即可。調試發現缺少window環境,由於我們使用的是nodejs,很明顯是缺少瀏覽器環境的,我們可
以去瀏覽器console處去調試。
將其替換成undefined,此時l的結果已經顯而易見,JS中||運算只要有一個運算元為真值,整個表達式即為真,即l="1",此時t函數也可以直接刪除。
d的調試結果是1,將js中對應代碼替換成"1"。
下面u和m很明顯是nodejs可以執行的,所以可以不用管(第四行也是如此)。
k(r,u+m)很明顯是第二個參數的生成過程,並且就是u+m的值
最后window.location.reload()是重載代碼,可以直接注釋
最終改動后js如下所示:
// !function () { //!表示構建一個立即執行函數 var o = "00000000ba3dc2781e265cb031f086a4c2e00e2c4a686a4c2e00e2c4a688"; //第一個cookie參數 document = {}; //調試發現document未定義,補全 //第一個參數生成函數,調用前后不變 // function k(e, t) { // var o = new Date; // o.setTime(o.getTime() + 864e5), document.cookie = e + "=" + escape(t) + ";expires=" + o.toGMTString() + ";path=/" // } function e(e, t) { return parseInt(e + "", t).toString(16) } // function t() { // var e = window.OfflineAudioContext || window.webkitOfflineAudioContext; // if (null == e) return "nt"; // var t = new e(1, 44100, 44100), n = t.createOscillator(), o; // n.type = "triangle"; // o = t.createDynamicsCompressor(); // void 0 !== o.ratio && "function" == typeof o.ratio.setValueAtTime && o.ratio.setValueAtTime(12, t.currentTime), n.connect(o), o.connect(t.destination), n.start(0), t.startRendering(), t.oncomplete = function () { // var e; // try { // e = 1838.1212, n.disconnect(), o.disconnect() // } catch (t) { // return t // } // return e // } // } function aa() { var o = "00000000b9c7f73b9698498a498ac5e781a34d6f092b2e086a44a680e2cc"; l = '1', d = '1', u = o.slice(o.length - 4), a = e(l + d + "11", 2) + "fffffff"; for (var s = o.slice(o.length - 8), m = "", w = 0; 8 > w; w++) m += e("0x" + a.charAt(w) ^ "0x" + s.charAt(w), 10); return u + m } //window.location.reload() // }()
相關爬蟲代碼:
def parse_cookie(self, response):
    """Handle the anti-bot JS challenge and re-issue the blocked request.

    A non-200 response carries the challenge JS: extract the seed ``o``
    from it, evaluate the cleaned cookie algorithm via the JS engine,
    attach both cookie parameters to the headers, and retry the URL with
    :meth:`parse_cookie1` as the callback.  A 200 response is forwarded
    straight to :meth:`parse`.
    """
    resp_url = response.url
    # Deep-copy meta so the retried request shares no mutable state with
    # the original response's meta.
    resp_meta = copy.deepcopy(response.meta)
    try:
        if response.status != 200:
            # First cookie parameter: the constant `o` embedded in the JS.
            # re.search with an explicit check instead of findall(...)[0],
            # which raised an opaque IndexError when the page changed.
            match = re.search(r'o="(.*?)";', response.text)
            if match is None:
                raise ValueError(f"challenge seed `o` not found in body of {resp_url}")
            o_str = match.group(1)
            # Second cookie parameter: run the cleaned challenge JS (see
            # the write-up above for how it was derived).
            cookie_js = """
            function e(e, t) {
                return parseInt(e + "", t).toString(16)
            }""" + """function aa() {""" + 'var o = "' + o_str + '";' + """l = '1', d = '1',
            u = o.slice(o.length - 4),
            a = e(l + d + "11", 2) + "fffffff";
            for (var s = o.slice(o.length - 8), m = "", w = 0; 8 > w; w++) m += e("0x" + a.charAt(w) ^ "0x" + s.charAt(w), 10);
            return u + m}"""
            js_result = pyv8_engine_service(cookie_js, functionName='aa')
            # Cookie keys are the `i` / `r` constants from the original JS.
            cookie = "5df3dc4b7aaea8a=" + o_str + ";" + "VudF9fX19lEVENT=" + js_result
            self.headers["cookie"] = cookie
            yield scrapy.Request(url=resp_url, method='GET', headers=self.headers, encoding="utf-8",
                                 dont_filter=True, callback=self.parse_cookie1,
                                 meta={**resp_meta, "resp_url": resp_url})
        else:
            # Not blocked: hand the page to the normal parser.
            yield scrapy.Request(url=resp_url, method='GET', headers=self.headers, encoding="utf-8",
                                 dont_filter=True, callback=self.parse,
                                 meta={**resp_meta, "resp_url": resp_url})
    except Exception:  # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        traceback.print_exc()
        self.logger.info(f"parse error url: {resp_url}")
def parse_cookie1(self, response):
    """Second-chance handler for the anti-bot challenge.

    If the retried request is still blocked (non-200), regenerate the
    cookie from the fresh challenge JS and retry once more, this time
    straight into :meth:`parse`.

    NOTE(review): a 200 response yields nothing here — presumably the
    success path is handled by an earlier callback; confirm this is
    intentional before reusing the method elsewhere.
    """
    resp_url = response.url
    # Deep-copy meta so the retried request shares no mutable state with
    # the original response's meta.
    resp_meta = copy.deepcopy(response.meta)
    try:
        if response.status != 200:
            # First cookie parameter: the constant `o` embedded in the JS.
            # re.search with an explicit check instead of findall(...)[0],
            # which raised an opaque IndexError when the page changed.
            match = re.search(r'o="(.*?)";', response.text)
            if match is None:
                raise ValueError(f"challenge seed `o` not found in body of {resp_url}")
            o_str = match.group(1)
            # Second cookie parameter: run the cleaned challenge JS (see
            # the write-up above for how it was derived).
            cookie_js = """
            function e(e, t) {
                return parseInt(e + "", t).toString(16)
            }""" + """function aa() {""" + 'var o = "' + o_str + '";' + """l = '1', d = '1',
            u = o.slice(o.length - 4),
            a = e(l + d + "11", 2) + "fffffff";
            for (var s = o.slice(o.length - 8), m = "", w = 0; 8 > w; w++) m += e("0x" + a.charAt(w) ^ "0x" + s.charAt(w), 10);
            return u + m}"""
            js_result = pyv8_engine_service(cookie_js, functionName='aa')
            # Cookie keys are the `i` / `r` constants from the original JS.
            cookie = "5df3dc4b7aaea8a=" + o_str + ";" + "VudF9fX19lEVENT=" + js_result
            self.headers["cookie"] = cookie
            yield scrapy.Request(url=resp_url, method='GET', headers=self.headers, encoding="utf-8",
                                 dont_filter=True, callback=self.parse,
                                 meta={**resp_meta, "resp_url": resp_url})
    except Exception:  # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        traceback.print_exc()
        self.logger.info(f"parse error url: {resp_url}")