mongodb在插入數據環節避免數據重複的方法(爬蟲中使用update去重)

mongo 去重python

import pymongo

client = pymongo.MongoClient()
collection = client.t.test

# collection.insert_one({'title': 'python', 'name': 'deng', 'age': 23})

data = {'title': 'go', 'name': 'wang', 'age': 45, 'url': 1}
# Upsert: if a document with url == 1 already exists, apply $set to it;
# otherwise insert a new document.  Collection.update() was deprecated and
# removed in PyMongo 4.x — update_one(..., upsert=True) is the supported form.
collection.update_one({'url': 1}, {'$set': data}, upsert=True)

# 上面的案例表示:如果url重複的話,url不更新,其餘字段如果數據不一致就會更新。

 

爬蟲案例:以url和cover_url作爲去重條件

# Upsert keyed on (url, cover_url): update the stored document in place if it
# exists, insert it otherwise.  update_one(..., upsert=True) replaces the
# Collection.update() call that was removed in PyMongo 4.x.
collection.update_one({'url': data['url'], 'cover_url': data['cover_url']}, {'$set': data}, upsert=True)
# coding=utf8
"""
author:dengjiyun

Fetch one page of Douban's top movie chart and upsert every movie into
MongoDB, keyed on (url, cover_url), so re-running the script never inserts
duplicate documents.
"""
import pymongo
import requests

client = pymongo.MongoClient()
collection = client.dou.douban

url = 'https://movie.douban.com/j/chart/top_list'

params = {
    'type': '11',
    'interval_id': '100:90',
    'action': '',
    'start': '60',
    'limit': '20'
}
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36'
}

# Fields copied verbatim from each item of the API response:
# vote_count (votes), score, title, url (detail page), cover_url (poster),
# rank, id (movie id), release_date.
FIELDS = ('vote_count', 'score', 'title', 'url', 'cover_url',
          'rank', 'id', 'release_date')

try:
    # timeout prevents the script from hanging forever on a dead connection.
    res = requests.get(url, params=params, headers=headers, timeout=10)
    # Fail loudly on an HTTP error page instead of crashing later in .json().
    res.raise_for_status()
    for item in res.json():
        data = {field: item[field] for field in FIELDS}
        print(item)
        # Upsert keyed on (url, cover_url): update in place when the movie is
        # already stored, insert otherwise — no duplicates across runs.
        # Collection.update() was removed in PyMongo 4.x; use update_one().
        collection.update_one(
            {'url': data['url'], 'cover_url': data['cover_url']},
            {'$set': data},
            upsert=True,
        )
finally:
    # Always release the MongoDB connection, even if the request fails.
    client.close()
相關文章
相關標籤/搜索