import copy
# 10. Write the crawled progress record to the database (asynchronous insert)
def insertProgressReportDrillById(self, item):
    """Asynchronously insert one progress-report row via the Twisted dbpool.

    A deep copy of *item* is handed to the worker thread so that later
    mutations of the original item cannot race with the pending insert
    (this snapshot is the documented fix for duplicated/corrupted rows).
    Returns the original item unchanged so the pipeline chain continues.
    """
    insert_sql = "insert into rpm_project_progress_fromxy(sum_report_progress,sum_production,proj_code) values(%s,%s,%s)"
    # Snapshot the item before scheduling the async write.
    snapshot = copy.deepcopy(item)
    deferred = self.dbpool.runInteraction(self._conditional_insertProgressReportDrillById, insert_sql, snapshot)
    deferred.addErrback(self._handle_error)
    return item
# 10. Transaction callback: performs the actual INSERT on a dbpool worker thread
def _conditional_insertProgressReportDrillById(self, tx, sql, item):
params = (item['sum_report_progress'],item['sum_production'],item['proj_code'])
tx.execute(sql, params)
# Blog-article residue (not code): "Scrapy crawler produces duplicate rows
# after inserting into the database" — latest related post published
# 2022-06-29 18:10:52.