# Scrapy downloader middleware (~62 lines). Extraction artifact
# (duplicated "62 lines / 3.4 KiB / Python" metadata) removed.
from scrapy import signals
|
|
from itemadapter import is_item, ItemAdapter
|
|
from scrapy.http import HtmlResponse
|
|
from time import sleep
|
|
import random
|
|
|
|
|
|
class DagongwangproDownloaderMiddleware:
    """Downloader middleware that rotates User-Agent strings, attaches a
    random HTTP proxy, and re-renders the dynamically loaded module pages
    with the spider's Selenium browser.

    Assumes the spider defines ``bro`` (a Selenium WebDriver) and
    ``models_urls`` (the module-page URLs that need JS rendering) —
    NOTE(review): confirm against the spider implementation.
    """

    # Pool of User-Agent strings for per-request UA spoofing.
    user_agent_list = [
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
        'Opera/8.0 (Windows NT 5.1; U; en)',
        'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
        'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2 ',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)',
        'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0) ',
    ]

    # Plain-HTTP proxy pool (host:port strings).
    PROXY_http = [
        '103.140.126.57:8888',
        '182.34.254.39:25624',
        '39.108.101.55:1080',
        '183.247.199.126:30001',
        '39.175.75.5:30001',
    ]
    # HTTPS proxy pool — currently empty, so https requests run without
    # a proxy (the original code raised IndexError on random.choice([])).
    PROXY_https = []

    def process_request(self, request, spider):
        """Attach a random User-Agent and a random HTTP proxy.

        Returns None so Scrapy continues downloading the request normally.
        """
        # UA spoofing
        request.headers['User-Agent'] = random.choice(self.user_agent_list)
        # BUG FIX: Scrapy reads the proxy from request.meta['proxy'] (with a
        # scheme prefix), not from a 'proxy' header — the original line
        # request.headers['proxy'] = ... was silently ignored.
        request.meta['proxy'] = 'http://' + random.choice(self.PROXY_http)
        return None

    def process_response(self, request, response, spider):
        """Replace the response for module-page URLs with a Selenium-rendered
        one so dynamically loaded content is visible to the spider.

        All other responses are passed through unchanged.
        """
        if request.url in spider.models_urls:
            # BUG FIX: the browser work now happens only for module URLs.
            # The original called bro.get() + sleep(2) for EVERY response,
            # paying a full page load and a 2 s stall per request.
            bro = spider.bro          # Selenium WebDriver created by the spider
            bro.get(request.url)      # render the module page in the browser
            sleep(2)                  # crude wait for JS content; TODO: WebDriverWait
            page_text = bro.page_source
            return HtmlResponse(
                url=request.url,
                body=page_text,
                encoding='utf-8',
                request=request,
            )
        return response  # responses for other requests are untouched

    def process_exception(self, request, exception, spider):
        """On download error, retry the request through a fresh proxy
        matching the request's scheme.

        Returns the (possibly modified) request so Scrapy reschedules it.
        """
        if request.url.split(':')[0] == 'http':
            pool, scheme = self.PROXY_http, 'http://'
        else:
            pool, scheme = self.PROXY_https, 'https://'
        # BUG FIX: guard against an empty pool — PROXY_https is empty, and
        # random.choice([]) in the original raised IndexError for any
        # failing https request.
        if pool:
            request.meta['proxy'] = scheme + random.choice(pool)
        # Resend the corrected request.
        return request
|