How To Build a Scrapy Environment

YetToCome wjw15129 at gmail.com
Mon Sep 23 04:55:28 EDT 2013


On Monday, September 23, 2013 at 4:12:21 PM UTC+8, YetToCome wrote:
> I have already installed Twisted, zope.interface, w3lib, libxml2, etc., but it still cannot be built. Here is the error message:
> 
> Traceback (most recent call last):
>   File "C:\Python27\lib\runpy.py", line 162, in _run_module_as_main
>     "__main__", fname, loader, pkg_name)
>   File "C:\Python27\lib\runpy.py", line 72, in _run_code
>     exec code in run_globals
>   File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 167, in <module>
>     execute()
>   File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 142, in execute
>     _run_print_help(parser, _run_command, cmd, args, opts)
>   File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 88, in _run_print_help
>     func(*a, **kw)
>   File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 149, in _run_command
>     cmd.run(args, opts)
>   File "C:\Python27\lib\site-packages\scrapy\commands\crawl.py", line 47, in run
>     crawler = self.crawler_process.create_crawler()
>   File "C:\Python27\lib\site-packages\scrapy\crawler.py", line 142, in create_crawler
>     self.crawlers[name] = Crawler(self.settings)
>   File "C:\Python27\lib\site-packages\scrapy\crawler.py", line 23, in __init__
>     self.spiders = spman_cls.from_crawler(self)
>   File "C:\Python27\lib\site-packages\scrapy\spidermanager.py", line 35, in from_crawler
>     sm = cls.from_settings(crawler.settings)
>   File "C:\Python27\lib\site-packages\scrapy\spidermanager.py", line 31, in from_settings
>     return cls(settings.getlist('SPIDER_MODULES'))
>   File "C:\Python27\lib\site-packages\scrapy\spidermanager.py", line 22, in __init__
>     for module in walk_modules(name):
>   File "C:\Python27\lib\site-packages\scrapy\utils\misc.py", line 66, in walk_modules
>     submod = __import__(fullpath, {}, {}, [''])
>   File "blog_crawl\spiders\dmoz_spider.py", line 1, in <module>
>     class DmozSpider(BaseSpider):
> NameError: name 'BaseSpider' is not defined
> 
> Can someone tell me why, please?
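
For reference, the NameError above just means dmoz_spider.py refers to BaseSpider without importing it. A minimal version of the file that satisfies the traceback (a sketch based on the Scrapy 0.18 tutorial; the spider name, allowed domain, and start URL below are assumptions, not taken from the original post) would be:

# blog_crawl/spiders/dmoz_spider.py -- minimal sketch for Scrapy 0.18
from scrapy.spider import BaseSpider  # the import the traceback says is missing

class DmozSpider(BaseSpider):
    name = "dmoz"                   # assumed spider name
    allowed_domains = ["dmoz.org"]  # assumed domain
    start_urls = [
        "http://www.dmoz.org/Computers/Programming/Languages/Python/Books/",  # assumed URL
    ]

    def parse(self, response):
        # Save each fetched page under the last path segment of its URL
        filename = response.url.split("/")[-2]
        open(filename, 'wb').write(response.body)

(In Scrapy 0.18, BaseSpider lives in scrapy.spider; newer releases renamed it to Spider.)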



It then raised another error, "No module named queuelib", even though I have installed all the libraries mentioned in that article...

2013-09-23 16:44:17+0800 [scrapy] INFO: Scrapy 0.18.2 started (bot: tutorial)
2013-09-23 16:44:17+0800 [scrapy] DEBUG: Optional features available: ssl, http11, libxml2
2013-09-23 16:44:17+0800 [scrapy] DEBUG: Overridden settings: {'NEWSPIDER_MODULE': 'tutorial.spiders', 'SPIDER_MODULES': ['tutorial.spiders'], 'BOT_NAME': 'tutorial'}
2013-09-23 16:44:17+0800 [scrapy] DEBUG: Enabled extensions: LogStats, TelnetConsole, CloseSpider, WebService, CoreStats, SpiderState
Traceback (most recent call last):
  File "C:\Python27\lib\runpy.py", line 162, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "C:\Python27\lib\runpy.py", line 72, in _run_code
    exec code in run_globals
  File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 167, in <module>
    execute()
  File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 142, in execute
    _run_print_help(parser, _run_command, cmd, args, opts)
  File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 88, in _run_print_help
    func(*a, **kw)
  File "C:\Python27\lib\site-packages\scrapy\cmdline.py", line 149, in _run_command
    cmd.run(args, opts)
  File "C:\Python27\lib\site-packages\scrapy\commands\crawl.py", line 50, in run
    self.crawler_process.start()
  File "C:\Python27\lib\site-packages\scrapy\crawler.py", line 93, in start
    if self.start_crawling():
  File "C:\Python27\lib\site-packages\scrapy\crawler.py", line 168, in start_crawling
    return self.start_crawler() is not None
  File "C:\Python27\lib\site-packages\scrapy\crawler.py", line 151, in start_crawler
    crawler.configure()
  File "C:\Python27\lib\site-packages\scrapy\crawler.py", line 45, in configure
    self.engine = ExecutionEngine(self, self._spider_closed)
  File "C:\Python27\lib\site-packages\scrapy\core\engine.py", line 61, in __init__
    self.scheduler_cls = load_object(self.settings['SCHEDULER'])
  File "C:\Python27\lib\site-packages\scrapy\utils\misc.py", line 40, in load_object
    raise ImportError, "Error loading object '%s': %s" % (path, e)
ImportError: Error loading object 'scrapy.core.scheduler.Scheduler': No module named queuelib
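
For reference, queuelib was split out of Scrapy into a separate package as of 0.18, so it is not pulled in by the libraries listed at the top of the thread; installing it from PyPI (e.g. pip install queuelib) is the usual fix. A quick sanity check, written as a sketch for Python 2.7 rather than taken from the original post:

# check_queuelib.py -- verify the dependency Scrapy 0.18's scheduler needs
try:
    import queuelib  # provides the disk/memory queues used by scrapy.core.scheduler
except ImportError:
    # queuelib ships separately on PyPI; install it, e.g.:  pip install queuelib
    print "queuelib is missing"
else:
    print "queuelib found at:", queuelib.__file__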


