#!/usr/bin/python
from bs4 import BeautifulSoup
import requests

# Scrape one listing page and collect, per listing: title, link, compound name, layout, size and total price
def getHouseList(url):
    house = []
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER'}
    # Fetch the page
    res = requests.get(url, headers=headers)
    # Parse the response
    soup = BeautifulSoup(res.content, 'lxml')
    # Listing titles
    housename_divs = soup.find_all('div', class_='title')
    for housename_div in housename_divs:
        housename_as = housename_div.find_all('a')
        for housename_a in housename_as:
            housename = []
            # Title
            housename.append(housename_a.get_text())
            # Link
            housename.append(housename_a['href'])
            house.append(housename)
    huseinfo_divs = soup.find_all('div', class_='houseInfo')
    for i in range(len(huseinfo_divs)):
        info = huseinfo_divs[i].get_text()
        infos = info.split('|')
        # Compound name
        house[i].append(infos[0])
        # Layout
        house[i].append(infos[1])
        # Size in square metres
        house[i].append(infos[2])
    # Total price
    house_prices = soup.find_all('div', class_='totalPrice')
    for i in range(len(house_prices)):
        # Price
        price = house_prices[i].get_text()
        house[i].append(price)
    return house
# Scrape a listing's detail page: district and interior floor area
def houseinfo(url):
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER'}
    res = requests.get(url, headers=headers)
    soup = BeautifulSoup(res.content, 'lxml')
    msg = []
    # District
    areainfos = soup.find_all('span', class_='info')
    for areainfo in areainfos:
        # Only the text of the first <a> tag is needed
        area = areainfo.find('a')
        if not area:
            continue
        hrefStr = area['href']
        if hrefStr.startswith('javascript'):
            continue
        msg.append(area.get_text())
        break
    # Sum the per-room areas from the layout table to get the interior floor area
    infolist = soup.find_all('div', id='infoList')
    num = []
    for info in infolist:
        cols = info.find_all('div', class_='col')
        for i in cols:
            pingmi = i.get_text()
            try:
                a = float(pingmi[:-2])
                num.append(a)
            except ValueError:
                continue
    msg.append(sum(num))
    return msg
# Append one listing record to a txt file
def writeFile(houseinfo):
    f = open('d:/房源.txt', 'a', encoding='utf8')
    f.write(houseinfo + '\n')
    f.close()
# Main function
def main():
    for i in range(1, 100):
        print('-----分隔符', i, '-------')
        if i == 1:
            url = 'https://sjz.lianjia.com/ershoufang/hy1f2f5sf1l3l2l4a2a3a4/'
        else:
            url = 'https://sjz.lianjia.com/ershoufang/pg' + str(i) + 'hy1f2f5sf1l3l2l4a2a3a4/'
        houses = getHouseList(url)
        for house in houses:
            link = house[1]
            if not link.startswith('http'):
                continue
            mianji = houseinfo(link)
            # Append the district and interior area to the listing record
            house.extend(mianji)
            print(house)
            info = " ".join([str(x) for x in house])
            writeFile(info)

if __name__ == '__main__':
    main()
The query on Lianjia matched 8,849 listings, but the site only serves 31 (listings per page) × 100 (maximum page number) = 3,100 of them through its paginated results, so the remaining listings cannot be reached this way.
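The 3,100 cap follows directly from the URL pattern main() uses: pages are addressed pg1 through pg100 and each page shows about 31 listings. A minimal sketch of that pattern (the filter suffix is copied verbatim from the script above; page_url is an illustrative helper, not part of the original code):

BASE = 'https://sjz.lianjia.com/ershoufang/'
FILTER = 'hy1f2f5sf1l3l2l4a2a3a4/'  # filter suffix used in main() above

def page_url(page):
    # Page 1 has no 'pg' segment; later pages insert 'pg<N>' before the filter suffix.
    return BASE + FILTER if page == 1 else BASE + 'pg' + str(page) + FILTER

# Only pages 1..100 are served, so at roughly 31 listings per page the crawl
# tops out around 31 * 100 = 3,100 of the 8,849 matching listings.
print(page_url(1))   # https://sjz.lianjia.com/ershoufang/hy1f2f5sf1l3l2l4a2a3a4/
print(page_url(37))  # https://sjz.lianjia.com/ershoufang/pg37hy1f2f5sf1l3l2l4a2a3a4/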
Version 2: fetch the listings of a specific residential compound and write them to an Excel file.
#!/usr/bin/python
from bs4 import BeautifulSoup
import requests
import xlwt

# Scrape one listing page and collect, per listing: title, link, compound name, layout, size and total price
def getHouseList(url):
    house = []
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER'}
    # Fetch the page
    res = requests.get(url, headers=headers)
    # Parse the response
    soup = BeautifulSoup(res.content, 'html.parser')
    # Listing titles
    housename_divs = soup.find_all('div', class_='title')
    for housename_div in housename_divs:
        housename_as = housename_div.find_all('a')
        for housename_a in housename_as:
            housename = []
            # Title
            housename.append(housename_a.get_text())
            # Link
            housename.append(housename_a.get('href'))
            house.append(housename)
    huseinfo_divs = soup.find_all('div', class_='houseInfo')
    for i in range(len(huseinfo_divs)):
        info = huseinfo_divs[i].get_text()
        infos = info.split('|')
        # Compound name
        house[i].append(infos[0])
        # Layout
        house[i].append(infos[1])
        # Size in square metres
        house[i].append(infos[2])
    # Total price
    house_prices = soup.find_all('div', class_='totalPrice')
    for i in range(len(house_prices)):
        # Price
        price = house_prices[i].get_text()
        house[i].append(price)
    return house
# Scrape a listing's detail page: district and interior floor area
def houseinfo(url):
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER'}
    res = requests.get(url, headers=headers)
    soup = BeautifulSoup(res.content, 'html.parser')
    msg = []
    # District
    areainfos = soup.find_all('span', class_='info')
    for areainfo in areainfos:
        # Only the text of the first <a> tag is needed
        area = areainfo.find('a')
        if not area:
            continue
        hrefStr = area['href']
        if hrefStr.startswith('javascript'):
            continue
        msg.append(area.get_text())
        break
    # Sum the per-room areas from the layout table to get the interior floor area
    infolist = soup.find_all('div', id='infoList')
    num = []
    for info in infolist:
        cols = info.find_all('div', class_='col')
        for i in cols:
            pingmi = i.get_text()
            try:
                a = float(pingmi[:-2])
                num.append(a)
            except ValueError:
                continue
    msg.append(sum(num))
    return msg
# Write the listing records to an Excel file
def writeExcel(excelPath, houses):
    workbook = xlwt.Workbook()
    # Create the first sheet
    sheet = workbook.add_sheet('git')
    # Header row
    row0 = ['標題', '連接地址', '戶型', '面積', '朝向', '總價', '所屬區域', '套內面積']
    for i in range(0, len(row0)):
        sheet.write(0, i, row0[i])
    for i in range(0, len(houses)):
        house = houses[i]
        print(house)
        for j in range(0, len(house)):
            sheet.write(i + 1, j, house[j])
    workbook.save(excelPath)
# Main function
def main():
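    # NOTE: the original post is cut off at this point. The body below is only a
    # sketch of how this second version's main() might tie getHouseList, houseinfo
    # and writeExcel together, modelled on the first version's main(); compound_url
    # and the output path are hypothetical placeholders, not values from the post.
    compound_url = 'https://sjz.lianjia.com/ershoufang/'  # placeholder: listing page of one compound
    houses = getHouseList(compound_url)
    for house in houses:
        link = house[1]
        if not link.startswith('http'):
            continue
        # Append the district and interior area to the listing record
        house.extend(houseinfo(link))
    writeExcel('d:/house.xls', houses)  # assumed output path; xlwt writes .xls files

if __name__ == '__main__':
    main()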