How to pass parameter to a scrapy pipeline object

Posted 2019-02-18 18:48

After scraping some data with a scrapy spider:

class Test_Spider(Spider):
    # Minimal Scrapy spider; yields scraped items that Scrapy feeds to the
    # pipeline discussed below.

    name = "test"
    def start_requests(self):
        # Iterates over ids 900..901; the "........" marks code elided in the
        # original post (presumably building `item` from each response).
        for i in range(900,902,1):
            ........
            yield item

I am passing the data to a pipeline object to be written to an SQLite table using SQLAlchemy:

class SQLlitePipeline(object):
    """Scrapy item pipeline that writes items into an SQLite table
    (via SQLAlchemy Core). The table name is hard-coded as "table1"."""

    def __init__(self):
        _engine = create_engine("sqlite:///data.db")
        _connection = _engine.connect()
        _metadata = MetaData()
        # Fixed: the original snippet never closed the Table(...) call,
        # which is a SyntaxError.
        _stack_items = Table("table1", _metadata,
                             Column("id", Integer, primary_key=True),
                             Column("detail_url", Text))
        # Create the table if it does not already exist.
        _metadata.create_all(_engine)
        self.connection = _connection
        self.stack_items = _stack_items

    def process_item(self, item, spider):
        # NOTE(review): truncated in the original post — a real pipeline
        # must return the item (or raise DropItem) after processing.
        is_valid = True

I'd like to be able to set the table name as a variable instead of being hard coded in as it is now ("table1"). How can this be done?

2 Answers

Answer 1 (by "The star") — 2019-02-18 19:18
class SQLlitePipeline(object):
    """Scrapy item pipeline writing to an SQLite table whose name is
    supplied at construction time instead of being hard-coded."""

    def __init__(self, table_name):
        _engine = create_engine("sqlite:///data.db")
        _connection = _engine.connect()
        _metadata = MetaData()
        # Fixed: the original snippet never closed the Table(...) call,
        # which is a SyntaxError.
        _stack_items = Table(table_name, _metadata,
                             Column("id", Integer, primary_key=True),
                             Column("detail_url", Text))
        # Create the table if it does not already exist.
        _metadata.create_all(_engine)
        self.connection = _connection
        self.stack_items = _stack_items

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy hook: build the pipeline, taking the table name from a
        `table_name` attribute on the running spider."""
        table_name = getattr(crawler.spider, 'table_name')
        return cls(table_name)

With from_crawler you can create or instantiate a pipeline object with the parameters you specify.

Show more

Answer 2 (by user "我只想做你的唯一") — 2019-02-18 19:20

Assuming you pass this parameter through the command line (e.g. -s table="table1"), define a from_crawler method.

@classmethod
def from_crawler(cls, crawler):
    """Scrapy hook: build the pipeline from crawler settings.

    Reads the table name from the "table" setting (e.g. passed on the
    command line with -s table=table1).
    """
    # Here, you get whatever value was passed through the "table" parameter
    settings = crawler.settings
    table = settings.get('table')

    # Instantiate the pipeline with your table
    return cls(table)

def __init__(self, table):
    """Open the SQLite database and (re)create the target table."""
    _engine = create_engine("sqlite:///data.db")
    _connection = _engine.connect()
    _metadata = MetaData()
    # Fixed: the original snippet never closed the Table(...) call,
    # which is a SyntaxError.
    _stack_items = Table(table, _metadata,
                         Column("id", Integer, primary_key=True),
                         Column("detail_url", Text))
    # Create the table if it does not already exist.
    _metadata.create_all(_engine)
    self.connection = _connection
    self.stack_items = _stack_items
Show more
Log in to post an answer