1. 报错信息如下
D:\python3\BR16>scrapy crawl BR16B -L WARNING
Unhandled error in Deferred:
2016-10-10 14:19:05 [twisted] CRITICAL: Unhandled error in Deferred:
Traceback (most recent call last):
  File "d:\python35\lib\site-packages\scrapy\commands\crawl.py", line 57, in run
    self.crawler_process.crawl(spname, **opts.spargs)
  File "d:\python35\lib\site-packages\scrapy\crawler.py", line 163, in crawl
    return self._crawl(crawler, *args, **kwargs)
  File "d:\python35\lib\site-packages\scrapy\crawler.py", line 167, in _crawl
    d = crawler.crawl(*args, **kwargs)
  File "d:\python35\lib\site-packages\twisted\internet\defer.py", line 1274, in unwindGenerator
    return _inlineCallbacks(None, gen, Deferred())
--- <exception caught here> ---
  File "d:\python35\lib\site-packages\twisted\internet\defer.py", line 1128, in _inlineCallbacks
    result = g.send(result)
  File "d:\python35\lib\site-packages\scrapy\crawler.py", line 72, in crawl
    self.engine = self._create_engine()
  File "d:\python35\lib\site-packages\scrapy\crawler.py", line 97, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "d:\python35\lib\site-packages\scrapy\core\engine.py", line 68, in __init__
    self.downloader = downloader_cls(crawler)
  File "d:\python35\lib\site-packages\scrapy\core\downloader\__init__.py", line 88, in __init__
    self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
  File "d:\python35\lib\site-packages\scrapy\middleware.py", line 58, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "d:\python35\lib\site-packages\scrapy\middleware.py", line 34, in from_settings
    mwcls = load_object(clspath)
  File "d:\python35\lib\site-packages\scrapy\utils\misc.py", line 44, in load_object
    mod = import_module(module)
  File "d:\python35\lib\importlib\__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 986, in _gcd_import
  File "<frozen importlib._bootstrap>", line 969, in _find_and_load
  File "<frozen importlib._bootstrap>", line 958, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 673, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 665, in exec_module
  File "<frozen importlib._bootstrap>", line 222, in _call_with_frames_removed
  File "d:\python35\lib\site-packages\scrapy\downloadermiddlewares\retry.py", line 23, in <module>
    from scrapy.xlib.tx import ResponseFailed
  File "d:\python35\lib\site-packages\scrapy\xlib\tx\__init__.py", line 3, in <module>
    from twisted.web import client
  File "d:\python35\lib\site-packages\twisted\web\client.py", line 42, in <module>
    from twisted.internet.endpoints import TCP4ClientEndpoint, SSL4ClientEndpoint
  File "d:\python35\lib\site-packages\twisted\internet\endpoints.py", line 34, in <module>
    from twisted.internet.stdio import StandardIO, PipeAddress
  File "d:\python35\lib\site-packages\twisted\internet\stdio.py", line 30, in <module>
    from twisted.internet import _win32stdio
builtins.ImportError: cannot import name '_win32stdio'

2016-10-10 14:19:06 [twisted] CRITICAL:
Traceback (most recent call last):
  File "d:\python35\lib\site-packages\twisted\internet\defer.py", line 1128, in _inlineCallbacks
    result = g.send(result)
  File "d:\python35\lib\site-packages\scrapy\crawler.py", line 72, in crawl
    self.engine = self._create_engine()
  File "d:\python35\lib\site-packages\scrapy\crawler.py", line 97, in _create_engine
    return ExecutionEngine(self, lambda _: self.stop())
  File "d:\python35\lib\site-packages\scrapy\core\engine.py", line 68, in __init__
    self.downloader = downloader_cls(crawler)
  File "d:\python35\lib\site-packages\scrapy\core\downloader\__init__.py", line 88, in __init__
    self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
  File "d:\python35\lib\site-packages\scrapy\middleware.py", line 58, in from_crawler
    return cls.from_settings(crawler.settings, crawler)
  File "d:\python35\lib\site-packages\scrapy\middleware.py", line 34, in from_settings
    mwcls = load_object(clspath)
  File "d:\python35\lib\site-packages\scrapy\utils\misc.py", line 44, in load_object
    mod = import_module(module)
  File "d:\python35\lib\importlib\__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 986, in _gcd_import
  File "<frozen importlib._bootstrap>", line 969, in _find_and_load
  File "<frozen importlib._bootstrap>", line 958, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 673, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 665, in exec_module
  File "<frozen importlib._bootstrap>", line 222, in _call_with_frames_removed
  File "d:\python35\lib\site-packages\scrapy\downloadermiddlewares\retry.py", line 23, in <module>
    from scrapy.xlib.tx import ResponseFailed
  File "d:\python35\lib\site-packages\scrapy\xlib\tx\__init__.py", line 3, in <module>
    from twisted.web import client
  File "d:\python35\lib\site-packages\twisted\web\client.py", line 42, in <module>
    from twisted.internet.endpoints import TCP4ClientEndpoint, SSL4ClientEndpoint
  File "d:\python35\lib\site-packages\twisted\internet\endpoints.py", line 34, in <module>
    from twisted.internet.stdio import StandardIO, PipeAddress
  File "d:\python35\lib\site-packages\twisted\internet\stdio.py", line 30, in <module>
    from twisted.internet import _win32stdio
ImportError: cannot import name '_win32stdio'

D:\python3\BR16>
解决方案:
Solution: Scrapy can work with Python 3 on Windows if you make some minor adjustments:

1. Copy `_win32stdio.py` and `_pollingfile.py` into the appropriate directory under `site-packages`, namely `<twisted-dir>\internet`. Download them from https://github.com/twisted/twisted/tree/trunk/twisted/internet
   URLs:
   https://github.com/twisted/twisted/blob/trunk/src/twisted/internet/_win32stdio.py
   https://github.com/twisted/twisted/blob/trunk/src/twisted/internet/_pollingfile.py
2. Run `pip install pypiwin32`.

Granted, this is based on my personal experience. Because the repository will certainly change in the future, readers should beware the age of this answer.

来源: http://stackoverflow.com/questions/37342603/importerror-cannot-import-name-win32stdio