|
5鱼币
网上找了很多方法都没有用.... 实在不行了
scrapy crawl books -o books.csv后出错
源代码
# -*- coding: utf-8 -*-
"""Spider that scrapes book names and prices from books.toscrape.com."""
import scrapy


class BooksSpider(scrapy.Spider):
    """Crawl the book catalogue, yielding one item per book and following
    the pagination links until the last page."""

    # Unique identifier used on the command line: `scrapy crawl books`.
    name = "books"
    # Starting point of the crawl.
    start_urls = ['http://books.toscrape.com/']

    def parse(self, response):
        """Extract every book on the current page, then follow 'next'.

        Yields:
            dict: {'name': <title string>, 'price': <price string>} per book.
            scrapy.Request: for the next catalogue page, when one exists.
        """
        # Each book is rendered inside an <article class="product_pod">.
        for book in response.css('article.product_pod'):
            name = book.xpath('./h3/a/@title').extract_first()
            price = book.css('p.price_color::text').extract_first()
            yield {
                'name': name,
                'price': price,
            }

        next_url = response.css('ul.pager li.next a::attr(href)').extract_first()
        # Fix 1: the href is relative (e.g. 'catalogue/page-2.html'), and
        # scrapy.Request requires an absolute URL -> build it with urljoin.
        # Fix 2: on the last page the selector yields None; passing None to
        # scrapy.Request raises, so guard before following.
        if next_url is not None:
            yield scrapy.Request(response.urljoin(next_url), callback=self.parse)
复制代码
出错提示:
- 2017-11-12 13:58:04 [scrapy.utils.log] INFO: Scrapy 1.4.0 started (bot: tutorial)
- 2017-11-12 13:58:04 [scrapy.utils.log] INFO: Overridden settings: {'BOT_NAME': 'tutorial', 'NEWSPIDER_MODULE': 'tutorial.spiders', 'ROBOTSTXT_OBEY': True, 'SPIDER_MODULES': ['tutorial.spiders']}
- Traceback (most recent call last):
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/spiderloader.py", line 69, in load
- return self._spiders[spider_name]
- KeyError: 'tutorial'
- During handling of the above exception, another exception occurred:
- Traceback (most recent call last):
- File "/Users/onec/anaconda3/bin/scrapy", line 11, in <module>
- sys.exit(execute())
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/cmdline.py", line 109, in execute
- settings = get_project_settings()
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/utils/project.py", line 68, in get_project_settings
- settings.setmodule(settings_module_path, priority='project')
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/settings/__init__.py", line 292, in setmodule
- module = import_module(module)
- File "/Users/onec/anaconda3/lib/python3.6/importlib/__init__.py", line 126, in import_module
- return _bootstrap._gcd_import(name[level:], package, level)
- File "<frozen importlib._bootstrap>", line 994, in _gcd_import
- File "<frozen importlib._bootstrap>", line 971, in _find_and_load
- File "<frozen importlib._bootstrap>", line 941, in _find_and_load_unlocked
- File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
- File "<frozen importlib._bootstrap>", line 994, in _gcd_import
- File "<frozen importlib._bootstrap>", line 971, in _find_and_load
- File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
- File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
- File "<frozen importlib._bootstrap_external>", line 678, in exec_module
- File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
- File "/Users/onec/tutorial/tutorial/__init__.py", line 2, in <module>
- cmdline.execute("scrapy crawl tutorial".split())
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/cmdline.py", line 149, in execute
- _run_print_help(parser, _run_command, cmd, args, opts)
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/cmdline.py", line 89, in _run_print_help
- func(*a, **kw)
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/cmdline.py", line 156, in _run_command
- cmd.run(args, opts)
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/commands/crawl.py", line 57, in run
- self.crawler_process.crawl(spname, **opts.spargs)
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/crawler.py", line 167, in crawl
- crawler = self.create_crawler(crawler_or_spidercls)
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/crawler.py", line 195, in create_crawler
- return self._create_crawler(crawler_or_spidercls)
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/crawler.py", line 199, in _create_crawler
- spidercls = self.spider_loader.load(spidercls)
- File "/Users/onec/anaconda3/lib/python3.6/site-packages/scrapy/spiderloader.py", line 71, in load
- raise KeyError("Spider not found: {}".format(spider_name))
- KeyError: 'Spider not found: tutorial'
- one-2:tutorial onec$
复制代码 |
|