1、settings.py
COOKIES_ENABLED = False
DOWNLOAD_DELAY = 3
ROBOTSTXT_OBEY = False
IP proxy pool settings:
IPPOOL = [
    {'ipaddr': '1.1.1.1'},
    {'ipaddr': '2.1.1.1'},
    {'ipaddr': '1.3.1.1'},
    {'ipaddr': '1.1.1.4'},
]
DOWNLOADER_MIDDLEWARES = {
    # 'Autopjt.middlewares.MyCustomDownloaderMiddleware': 543,
    'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 123,
    'Autopjt.middlewares.IPPOOLS': 125,
}
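The addresses above are placeholders; a usable proxy entry needs a reachable host and port (e.g. '1.1.1.1:8080'). Before hard-coding entries into IPPOOL it can help to confirm that each proxy actually answers. A minimal sketch using the requests library (the test URL, timeout and sample addresses are assumptions, not part of the project):

import requests

def proxy_is_alive(ipaddr, timeout=5):
    # Returns True if a simple HTTP request succeeds through the proxy.
    try:
        requests.get('http://httpbin.org/ip',
                     proxies={'http': 'http://' + ipaddr},
                     timeout=timeout)
        return True
    except requests.RequestException:
        return False

# Keep only the proxies that respond.
candidates = ['1.1.1.1:8080', '2.1.1.1:8080']
IPPOOL = [{'ipaddr': ip} for ip in candidates if proxy_is_alive(ip)]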
2、middlewares.py
import random
from Autopjt.settings import IPPOOL
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware

class IPPOOLS(HttpProxyMiddleware):
    def __init__(self, ip=''):
        self.ip = ip

    def process_request(self, request, spider):
        # Pick a random IP from the pool and set it as the proxy for this request.
        thisip = random.choice(IPPOOL)
        request.meta['proxy'] = 'http://' + thisip['ipaddr']
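To confirm the proxy really gets attached, a throwaway spider can log the proxy recorded in each request's meta. A minimal sketch (the spider name and test URL are assumptions):

import scrapy

class ProxyCheckSpider(scrapy.Spider):
    # Hypothetical spider that reports which proxy each response came through.
    name = 'proxycheck'
    start_urls = ['http://httpbin.org/ip']

    def parse(self, response):
        self.logger.info('proxy used: %s', response.request.meta.get('proxy'))
        self.logger.info('response body: %s', response.text)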
UA (User-Agent) pool, again in settings.py:
UAPOOL = [
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.103 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.104 Safari/537.36',
]
DOWNLOADER_MIDDLEWARES = {
    # 'Autopjt.middlewares.MyCustomDownloaderMiddleware': 543,
    # 'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 123,
    # 'Autopjt.middlewares.IPPOOLS': 125,
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': 2,
    'Autopjt.middlewares.Uamid': 1,
}
middlewares.py
import random
from Autopjt.settings import UAPOOL
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware

class Uamid(UserAgentMiddleware):
    def __init__(self, ua=''):
        self.ua = ua

    def process_request(self, request, spider):
        # Pick a random User-Agent from the pool and set it on this request.
        thisua = random.choice(UAPOOL)
        request.headers.setdefault('User-Agent', thisua)
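The two pools can also run at the same time: enable both middleware classes in one DOWNLOADER_MIDDLEWARES dict instead of commenting one set out. A sketch, reusing the priority numbers from above:

DOWNLOADER_MIDDLEWARES = {
    # Random User-Agent pool
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': 2,
    'Autopjt.middlewares.Uamid': 1,
    # Random IP proxy pool
    'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 123,
    'Autopjt.middlewares.IPPOOLS': 125,
}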