進入hive_home目錄下 輸入命令: 啓動metastore: 該服務是元數據服務 bin/hive --service metastore 後臺啓動: bin/hive --service metastore >> /usr/local/devtools/cdhbigdata/cdhhive/hive-0.13.1-cdh5.3.6/logs/hive.log 2>&1 & 後臺啓動,關閉shell鏈接依然存在: nohup bin/hive --service metastore >> /usr/local/devtools/cdhbigdata/cdhhive/hive-0.13.1-cdh5.3.6/logs/hive.log 2>&1 & 啓動:hiveserver2 nohup hive --service hiveserver2 & 建立分區表 CREATE EXTERNAL TABLE traffic(ip string, time string, url string) PARTITIONED BY (logdate string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LOCATION '/user/root/output/hivetables/traffic'; 建立分區 ALTER TABLE traffic ADD PARTITION(logdate='2013_05_30') LOCATION '/user/root/output/hivetables/traffic/2013_05_30'; 導入數據使用 local表示從本地導入,使用的是複製操做,原文件保留,沒有local,表示從hdfs文件系統導入,使用的是剪切操做,原目錄下的文件將被移除。 load data inpath '/user/root/output/part-r-00000' into table traffic PARTITION (logdate='2013_05_30'); PV量 頁面瀏覽量即爲PV(Page View),是指全部用戶瀏覽頁面的總和,一個獨立用戶每打開一個頁面就被記錄1 次。這裏,咱們只須要統計日誌中的記錄個數便可,HQL代碼以下: SELECT COUNT(1) AS PV FROM traffic WHERE logdate='2013_05_30'; create table pv2013_05_30 as SELECT COUNT(1) AS PV FROM traffic WHERE logdate='2013_05_30'; pv 930 註冊用戶數 SELECT COUNT(1) AS REGUSER FROM traffic WHERE logdate='2013_05_30' AND INSTR(url,'/member.php?mod=register')>0; 登錄用戶數 SELECT COUNT(1) AS REGUSER FROM traffic WHERE logdate='2013_05_30' AND INSTR(url,'/member.php?mod=logging')>0; create table registernum2013_05_30 as SELECT COUNT(1) AS REGUSER FROM traffic WHERE logdate='2013_05_30' AND INSTR(url,'/member.php?mod=logging')>0; 18 獨立IP數 一天以內,訪問網站的不一樣獨立 IP 個數加和。其中同一IP不管訪問了幾個頁面,獨立IP 數均爲1。所以,這裏咱們只須要統計日誌中處理的獨立IP數便可,在SQL中咱們能夠經過DISTINCT關鍵字,在HQL中也是經過這個關鍵字: SELECT COUNT(DISTINCT ip) AS IP FROM traffic WHERE logdate='2013_05_30'; create table ip2013_05_30 as SELECT COUNT(DISTINCT ip) AS IP FROM traffic WHERE logdate='2013_05_30'; ip 140 跳出用戶數 只瀏覽了一個頁面便離開了網站的訪問次數,即只瀏覽了一個頁面便再也不訪問的訪問次數。這裏,咱們能夠經過用戶的IP進行分組,若是分組後的記錄數只有一條,那麼即爲跳出用戶。將這些用戶的數量相加,就得出了跳出用戶數 SELECT COUNT(1) AS jump FROM (SELECT COUNT(ip) AS times FROM traffic 
WHERE logdate='2013_05_30' GROUP BY ip HAVING times=1) e create table jump2013_05_30 as SELECT COUNT(1) AS jump FROM (SELECT COUNT(ip) AS times FROM traffic WHERE logdate='2013_05_30' GROUP BY ip HAVING times=1) e OK jump 61 PS:跳出率是指只瀏覽了一個頁面便離開了網站的訪問次數佔總的訪問次數的百分比,即只瀏覽了一個頁面的訪問次數 / 所有的訪問次數彙總。這裏,咱們能夠將這裏得出的跳出用戶數/PV數便可獲得跳出率。 將全部關鍵指標放入一張彙總表中以便於經過Sqoop導出到MySQL 爲了方便經過Sqoop統一導出到MySQL,這裏咱們藉助一張彙總表將剛剛統計到的結果整合起來,經過錶鏈接結合,HQL代碼以下: CREATE TABLE traffic2013_05_30 AS SELECT '20130530' as date, a.pv, b.reguser, c.ip, d.jump FROM pv2013_05_30 a JOIN registernum2013_05_30 b ON 1=1 JOIN ip2013_05_30 c ON 1=1 JOIN jump2013_05_30 d ON 1=1; create table techbbs_logs_stat( logdate varchar(10) primary key, pv int, reguser int, ip int, jumper int); ./bin/sqoop export --connect jdbc:mysql://xupan001:3306/traffic --username root --password root --table techbbs_logs_stat --fields-terminated-by '\001' --export-dir '/user/hive/warehouse/traffic2013_05_30' step4~step8爲新增內容: #!/bin/sh ...... #step4.alter hive table and then add partition hive -e "ALTER TABLE techbbs ADD PARTITION(logdate='${yesterday}') LOCATION '/project/techbbs/cleaned/${yesterday}';" #step5.create hive table everyday hive -e "CREATE TABLE hmbbs_pv_${yesterday} AS SELECT COUNT(1) AS PV FROM hmbbs WHERE logdate='${yesterday}';" hive -e "CREATE TABLE hmbbs_reguser_${yesterday} AS SELECT COUNT(1) AS REGUSER FROM hmbbs WHERE logdate='${yesterday}' AND INSTR(url,'member.php?mod=register')>0;" hive -e "CREATE TABLE hmbbs_ip_${yesterday} AS SELECT COUNT(DISTINCT ip) AS IP FROM hmbbs WHERE logdate='${yesterday}';" hive -e "CREATE TABLE hmbbs_jumper_${yesterday} AS SELECT COUNT(1) AS jumper FROM (SELECT COUNT(ip) AS times FROM hmbbs WHERE logdate='${yesterday}' GROUP BY ip HAVING times=1) e;" hive -e "CREATE TABLE hmbbs_${yesterday} AS SELECT '${yesterday}', a.pv, b.reguser, c.ip, d.jumper FROM hmbbs_pv_${yesterday} a JOIN hmbbs_reguser_${yesterday} b ON 1=1 JOIN hmbbs_ip_${yesterday} c ON 1=1 JOIN hmbbs_jumper_${yesterday} d ON 1=1;" #step6.delete 
hive tables hive -e "drop table hmbbs_pv_${yesterday};" hive -e "drop table hmbbs_reguser_${yesterday};" hive -e "drop table hmbbs_ip_${yesterday};" hive -e "drop table hmbbs_jumper_${yesterday};" #step7.export to mysql sqoop export --connect jdbc:mysql://hadoop-master:3306/techbbs --username root --password admin --table techbbs_logs_stat --fields-terminated-by '\001' --export-dir "/hive/hmbbs_${yesterday}" #step8.delete hive table hive -e "drop table hmbbs_${yesterday};" 這裏將日期字符串做爲參數傳入,將該步驟轉移到了其餘腳本文件中; #!/bin/sh #step1.get yesterday format string #yesterday=`date --date='1 days ago' +%Y_%m_%d` yesterday=$1 #!/bin/sh yesterday=`date --date='1 days ago' +%Y_%m_%d` techbbs_core.sh $yesterday 改寫crontab定時任務配置:crontab -e 0 1 * * * /usr/local/files/apache_logs/techbbs_daily.sh 初始化任務操做 當一個網站已經生成了不少天的日誌,而咱們的日誌分析系統卻一直沒上線,一直等到了某天才上線。這時,咱們須要寫一個初始化腳本任務,來對以前的天天的日誌進行統計分析與導出結果。這裏,咱們新增一個techbbs_init.sh腳本文件,內容以下: #step1.create external table in hive hive -e "CREATE EXTERNAL TABLE techbbs(ip string, atime string, url string) PARTITIONED BY (logdate string) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LOCATION '/project/techbbs/cleaned';" #step2.compute the days between start date and end date s1=`date --date="$1" +%s` s2=`date +%s` s3=$((($s2-$s1)/3600/24)) #step3.execute techbbs_core.sh $s3 times for ((i=$s3; i>0; i--)) do logdate=`date --date="$i days ago" +%Y_%m_%d` techbbs_core.sh $logdate done