Python開發網絡爬蟲抓取某同城房價信息

前言:

苦逼的我從某某城市換到另外一個稍微大點的某某城市,面臨的第一個問題就是買房,奮鬥10多年,又回到起點,廢話就很少說了,看看如何設計程序把某同城上的房價數據抓取過來。

方案:方案思路很簡單,先把網頁內容獲取下來,經過必定規則對內容解析,保存成想要的格式

image

難點是對網頁的解析,是一個比較細緻的活,必須邊輸出,邊調試。

具體實現:

獲取網頁內容:

def get_page(url):
    """Fetch *url* with browser-like headers and return the body as text.

    The response bytes are decoded as UTF-8 with undecodable bytes
    dropped ('ignore'), so the caller always receives a str.
    """
    headers = {
        'User-Agent': r'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                      r'Chrome/45.0.2454.85 Safari/537.36 115Browser/6.0.3',
        'Referer': r'http://jn.58.com/ershoufang/',
        'Host': r'jn.58.com',
        'Connection': 'keep-alive'
    }
    timeout = 60
    # NOTE: this sets a process-wide default timeout for all sockets,
    # not just this request.
    socket.setdefaulttimeout(timeout)
    req = request.Request(url, headers=headers)
    response = request.urlopen(req).read()
    # 'ignore' drops bytes that are not valid UTF-8 instead of raising.
    page = response.decode('utf-8', 'ignore')
    # Original text had the garbled "return pagesocket"; the intended
    # statement is simply to return the decoded page.
    return page

第二步解析網頁:解析時要注意無效內容的處理,否則跑起來會報錯,調試很麻煩

def get_58_house(url):
    """Scrape one 58.com second-hand-house listing page into a DataFrame.

    Each <tr> of the #main table becomes one row with listing name, link,
    neighbourhood (xq/xq1), price fields, highlights (ts) and the scrape
    date (op_time). Malformed rows (ads, separators) are logged and
    emitted with whatever fields could be parsed.
    """
    html = get_page(url)
    soup = BeautifulSoup(html, "lxml")
    table = soup.find(id="main")
    df = pd.DataFrame(columns=["op_time", "web", "house_name", "xq", "xq1", "price",
                               "per_price", "room", "m2", "href", "ts"])
    rows = []
    for tr in table.find_all('tr'):
        # Reset every per-row field so a failed parse cannot leak values
        # from the previous row into this one (bug in the original, which
        # reused the previous iteration's variables).
        str_name = ''
        str_link = ''
        str_xq1 = ''
        str_xq2 = ''
        str_ts = []
        str_toal = ''
        str_per = ''
        str_room = ''
        str_m2 = ''
        try:
            head = tr.find("p", "bthead").find("a", "t")
            str_name = head.string.strip()
            str_link = head["href"]

            # Neighbourhood / location: up to two "a_xq1" links.
            str_xq = [s.string.strip() for s in tr.find_all("a", "a_xq1")
                      if s.string is not None]
            if len(str_xq) > 0:
                str_xq1 = str_xq[0]
            if len(str_xq) > 1:
                str_xq2 = str_xq[1]

            # Listing highlights.
            left = tr.find("div", "qj-listleft")
            if left is not None:
                str_ts = list(left.stripped_strings)

            # Price block: total price, per-m2 price, room layout, area.
            right = tr.find("div", "qj-listright btall")
            if right is not None:
                str_price = list(right.stripped_strings)
                if len(str_price) > 0:
                    str_toal = str_price[0]
                if len(str_price) > 1:
                    str_per = re.findall(r"(\d+\.*\d+)", str_price[1])
                if len(str_price) > 2:
                    str_room = str_price[2]
                if len(str_price) > 3:
                    str_m2 = re.findall(r"(\d+\.*\d+)", str_price[3])
        except Exception as e:
            # Keep going: invalid rows are expected on this page.
            print('Exception', ":", e)

        rows.append({'web': '58同城', 'house_name': str_name, 'xq': str_xq1,
                     'xq1': str_xq2, 'price': str_toal, 'per_price': str_per,
                     'room': str_room, 'm2': str_m2, 'ts': ''.join(str_ts),
                     'href': str_link})
    if rows:
        # DataFrame.append was removed in pandas 2.0; build all rows at
        # once with concat, which is also far faster than per-row append.
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)
    df["op_time"] = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    return df

第三步循環處理每頁數據並保存數據:

def get_58_house_all():
    """Scrape all 70 listing pages and append them to the Oracle `house` table.

    Pages whose save fails (e.g. GBK-encoding errors, see NOTE below) are
    dumped to record.csv instead so the scraped data is not lost. A 20 s
    pause follows every page to stay polite to the server. The DB
    connection is always closed, even on a hard failure.
    """
    # Database connection (credentials are placeholders).
    engine = create_engine('oracle+cx_oracle://user:password@localhost/orcl')
    cnx = engine.connect()
    try:
        # Optionally clear today's rows first (intentionally disabled).
        '''
        strSql = 'delete from house where op_time=\'{}\' '.format(time.strftime('%Y-%m-%d',time.localtime(time.time())))
        cnx.execute(strSql)
        '''
        # Page 1 has no "pn" suffix; pages 2..70 use /pn<i>. The original
        # duplicated the whole fetch/save sequence for page 1; one loop
        # covers both cases.
        for i in range(1, 70 + 1):
            if i == 1:
                str_http = "http://jn.58.com/ershoufang/"
            else:
                str_http = "http://jn.58.com/ershoufang/pn" + str(i)
            writelog(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                     + ' Start:' + str_http)
            df1 = None
            try:
                df1 = get_58_house(str_http)
                df1.to_sql('house', cnx, if_exists='append')
            except Exception:
                # NOTE: with a GBK-configured Oracle client, characters such
                # as the superscript 2 in "m²" raise
                #   UnicodeEncodeError: 'gbk' codec can't encode character '\xb2'
                # Fix by switching the client charset to UTF-8:
                #   NLS_LANG: AMERICAN_AMERICA.AL32UTF8
                #   NLS_CHARACTERSET: UTF8
                #   NLS_NCHAR_CHARACTERSET: UTF8
                # Until then, fall back to CSV so the page is not lost.
                writelog(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                         + ' Except:' + str_http)
                # Guard: if get_58_house itself raised, there is nothing to
                # dump (the original wrote the *previous* page's df here).
                if df1 is not None:
                    df1.to_csv('record.csv', sep=',', encoding='utf-8')
                writelog(traceback.format_exc())
            writelog(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
                     + ' End:' + str_http)
            time.sleep(20)
    finally:
        # Always release the connection (the original leaked it on error).
        cnx.close()

 

跑跑看看是否是程序一切運行正常。

相關文章
相關標籤/搜索