# 9.py — qimao.com scraper: solves the acw_sc__v2 anti-bot cookie challenge.
  1. import requests
  2. import re
  3. import simhash
  4. url = "https://www.qimao.com/shuku/a-a-a-a-a-a-a-click-1/"
  5. def get_hexxor(s1, _0x4e08d8):
  6. _0x5a5d3b = ''
  7. for i in range(len(s1)):
  8. if i % 2 != 0: continue
  9. _0x401af1 = int(s1[i: i+2], 16)
  10. _0x105f59 = int(_0x4e08d8[i: i+2], 16)
  11. _0x189e2c_10 = (_0x401af1 ^ _0x105f59)
  12. print("i==",_0x401af1,_0x105f59,_0x189e2c_10)
  13. print("i==",hex(_0x401af1),hex(_0x105f59),hex(_0x189e2c_10))
  14. _0x189e2c = hex(_0x189e2c_10)[2:]
  15. print("_0x189e2c",_0x189e2c)
  16. if len(_0x189e2c) == 1:
  17. _0x189e2c = '0' + _0x189e2c
  18. print("_0x189e2c",_0x189e2c)
  19. _0x5a5d3b += _0x189e2c
  20. return _0x5a5d3b
  21. def get_unsbox(arg1):
  22. _0x4b082b = [0xf, 0x23, 0x1d, 0x18, 0x21, 0x10, 0x1, 0x26, 0xa, 0x9, 0x13, 0x1f, 0x28, 0x1b, 0x16, 0x17, 0x19, 0xd,
  23. 0x6, 0xb, 0x27, 0x12, 0x14, 0x8, 0xe, 0x15, 0x20, 0x1a, 0x2, 0x1e, 0x7, 0x4, 0x11, 0x5, 0x3, 0x1c,
  24. 0x22, 0x25, 0xc, 0x24]
  25. print(_0x4b082b)
  26. _0x4da0dc = []
  27. _0x12605e = ''
  28. for i in _0x4b082b:
  29. print('i--',i,i-1)
  30. _0x4da0dc.append(arg1[i-1])
  31. _0x12605e = "".join(_0x4da0dc)
  32. return _0x12605e
  33. # 第一次请求获取js代码
  34. headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36"}
  35. r = requests.get(url, headers=headers)
  36. # 重js中匹配出 arg1
  37. arg1 = re.findall("arg1=\'(.*?)\'", r.text)[0]
  38. # 参数生成
  39. s1 = get_unsbox(arg1)
  40. print("===",arg1,s1)
  41. _0x4e08d8 = "3000176000856006061501533003690027800375"
  42. _0x12605e = get_hexxor(s1, _0x4e08d8)
  43. print(s1, _0x12605e)
  44. # 二次请求携带cookie 获取html文件
  45. headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36",
  46. "cookie": "acw_sc__v2=%s" % _0x12605e}
  47. r = requests.get(url, headers=headers)
  48. # print(r.text)