Connecting Scrapy to MySQL to Store Scraped Data

settings.py

ITEM_PIPELINES = {
   'IAChina.pipelines.IachinaPipeline': 300,
}

 

DBKWARGS = {'db':'iachina','user':'root','password':'',
            'host':'localhost','use_unicode':True,'charset':'utf8'}
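
The pipeline below inserts into a table named info in the iachina database, so that table has to exist before the spider runs. A minimal one-off setup sketch, reusing the same DBKWARGS (the column types and sizes are an assumption; adjust them to your data):

import MySQLdb

DBKWARGS = {'db':'iachina','user':'root','password':'',
            'host':'localhost','use_unicode':True,'charset':'utf8'}

con = MySQLdb.connect(**DBKWARGS)
cur = con.cursor()
# Column names match the INSERT statement used in the pipeline below;
# the VARCHAR sizes are assumptions.
cur.execute(
    "CREATE TABLE IF NOT EXISTS info ("
    " COMPANY VARCHAR(255),"
    " TYPE VARCHAR(255),"
    " PRODUCT VARCHAR(255),"
    " CLAUSE VARCHAR(255),"
    " CLAUSE_URL VARCHAR(1024)"
    ") DEFAULT CHARSET=utf8")
con.commit()
cur.close()
con.close()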

 

pipelines.py

import MySQLdb

class IachinaPipeline(object):
    def process_item(self, item, spider):
        # Read the connection settings defined in settings.py
        DBKWARGS = spider.settings.get('DBKWARGS')
        con = MySQLdb.connect(**DBKWARGS)
        cur = con.cursor()
        sql = ("insert into info(COMPANY,TYPE,PRODUCT,CLAUSE,CLAUSE_URL) "
               "values(%s,%s,%s,%s,%s)")
        values = (item['COMPANY'], item['TYPE'], item['PRODUCT'],
                  item['CLAUSE'], item['CLAUSE_URL'])
        try:
            cur.execute(sql, values)
        except Exception as e:
            print("Insert error:", e)
            con.rollback()  # roll back so the failed insert leaves no partial data
        else:
            con.commit()  # the insert only takes effect after commit
        cur.close()
        con.close()
        return item
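
Note that this pipeline opens and closes a new connection for every item, which is simple but adds overhead; a common refinement is to move the connect call into open_spider and the close into close_spider. For completeness, the item class feeding the pipeline must declare the five fields read above. A minimal sketch, assuming the standard Scrapy project layout (IAChina/items.py; the class name IachinaItem is an assumption):

import scrapy

class IachinaItem(scrapy.Item):
    # Field names match the keys the pipeline reads from each item
    COMPANY = scrapy.Field()
    TYPE = scrapy.Field()
    PRODUCT = scrapy.Field()
    CLAUSE = scrapy.Field()
    CLAUSE_URL = scrapy.Field()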