1. Adding cookies in the downloader middleware
Edit middlewares.py and modify the process_request method of the xxxDownloaderMiddleware class:
from scrapy import signals


class CxrDownloaderMiddleware:
    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_request(self, request, spider):
        # Add cookies to the request here
        cookies = {
            '_gid': 'GA1.2.1669620947.1730686665',
            '_gat_gtag_UA_87592301_7': '1',
            '_ga_YKC8ZQQ4FF': 'GS1.1.1730699582.2.1.1730700166.0.0.0',
            '_ga': 'GA1.1.1502111631.1730686665',
            # add more cookies as needed
        }
        request.cookies.update(cookies)
        # print(f'Downloader middleware added cookies: {cookies}')
        return None

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
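For the middleware to actually run, it also has to be enabled in settings.py. A minimal sketch, assuming the project module is named cxr and using Scrapy's default priority of 543:

    # settings.py
    DOWNLOADER_MIDDLEWARES = {
        'cxr.middlewares.CxrDownloaderMiddleware': 543,
    }
    # Scrapy's built-in CookiesMiddleware must stay enabled (it is by default)
    # so that request.cookies is actually sent with each request.
    COOKIES_ENABLED = True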
One thing to note: if you send requests yourself inside a pipeline (pipelines.py), cookies added in the downloader middleware have no effect on them, because those requests never pass through Scrapy's downloader. You have to attach the cookies to such requests manually.
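For example, a pipeline that fetches extra data with the third-party requests library has to pass the cookies explicitly. This is only a sketch; the process_item body, the URL, and the item field are illustrative assumptions, not part of the original project:

    # pipelines.py
    import requests


    class CxrPipeline:
        cookies = {
            '_gid': 'GA1.2.1669620947.1730686665',
            '_ga': 'GA1.1.1502111631.1730686665',
        }

        def process_item(self, item, spider):
            # This request bypasses the downloader middleware,
            # so the cookies must be attached here by hand.
            resp = requests.get('https://example.com/detail', cookies=self.cookies)
            item['detail_status'] = resp.status_code  # illustrative field
            return item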
To be continued...