Scrapy custom commands

Reference: http://www.tuicool.com/articles/UnUBbuJ

1. Create a commands package at the same level as the spiders package (see the layout sketch below).
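A minimal layout sketch, assuming the project package is named you_name (matching the COMMANDS_MODULE example in step 3). Since COMMANDS_MODULE must point to an importable module, the commands directory needs an empty __init__.py; crawlall.py is the file added in step 2:

you_name/
    scrapy.cfg
    you_name/
        __init__.py
        settings.py
        commands/
            __init__.py
            crawlall.py
        spiders/
            __init__.py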

2. Create a new file crawlall.py:

__author__ = 'fuhan'

from scrapy.commands import ScrapyCommand
from scrapy.exceptions import UsageError
from scrapy.utils.conf import arglist_to_dict


class Command(ScrapyCommand):

    requires_project = True

    def syntax(self):
        return '[options]'

    def short_desc(self):
        return 'Runs all of the spiders'

    def add_options(self, parser):
        ScrapyCommand.add_options(self, parser)
        parser.add_option("-a", dest="spargs", action="append", default=[], metavar="NAME=VALUE",
                          help="set spider argument (may be repeated)")
        parser.add_option("-o", "--output", metavar="FILE",
                          help="dump scraped items into FILE (use - for stdout)")
        parser.add_option("-t", "--output-format", metavar="FORMAT",
                          help="format to use for dumping items with -o")

    def process_options(self, args, opts):
        ScrapyCommand.process_options(self, args, opts)
        try:
            # Turn the repeated -a NAME=VALUE options into a dict of spider arguments
            opts.spargs = arglist_to_dict(opts.spargs)
        except ValueError:
            raise UsageError("Invalid -a value, use -a NAME=VALUE", print_help=False)

    def run(self, args, opts):
        # Schedule every spider in the project (or only those named on the command line)
        spider_loader = self.crawler_process.spider_loader
        for spidername in args or spider_loader.list():
            print("*********crawlall spidername************" + spidername)
            self.crawler_process.crawl(spidername, **opts.spargs)
        # Start the reactor; this blocks until all scheduled crawls have finished
        self.crawler_process.start()

The key points here: self.crawler_process.spider_loader.list() returns the names of all the spiders in the project, and self.crawler_process.crawl() schedules each one to run. Note that as written, run() only consumes the -a arguments (opts.spargs); the -o/-t options are declared but never applied to the settings, so they appear to have no effect unless wired up.
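The same list-then-crawl pattern also works outside a custom command. A minimal standalone sketch (run_all.py is a hypothetical script name, assumed to be run from the project root so Scrapy can find scrapy.cfg):

# run_all.py - run every spider in the project from a plain script
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

process = CrawlerProcess(get_project_settings())

# spider_loader.list() returns the name of every spider registered in the project
for spider_name in process.spider_loader.list():
    process.crawl(spider_name)

process.start()  # blocks until all scheduled crawls finish

The custom command is still the better fit when the runner should behave like a built-in scrapy subcommand, with project discovery and the common command-line options.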

3. Add this setting to settings.py (you_name being the project's package name):

COMMANDS_MODULE = 'you_name.commands'
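With the setting in place, the command shows up in scrapy -h and can be invoked like any built-in command, for example (spider1, spider2, and category=news are hypothetical names):

scrapy crawlall                    # run every spider in the project
scrapy crawlall spider1 spider2    # run only the named spiders
scrapy crawlall -a category=news   # pass the same -a argument to every spider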