1. Count how many times each song is played.
2. Display each song's play count with an ECharts bar chart.
Create a Java EE project.
MusicCount.java, the MapReduce job that counts plays per song:

package com.etc.mc;

import java.io.IOException;
import java.util.HashMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/** Song play-count statistics. */
public class MusicCount {

    // Map collection that holds the aggregated counts
    public static HashMap<String, Integer> map = new HashMap<String, Integer>();

    public static class MusicMapper extends Mapper<Object, Text, Text, IntWritable> {
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            IntWritable valueOut = new IntWritable(1);
            String keyInStr = value.toString();
            String[] keyInStrArr = keyInStr.split("\t"); // split the input line on tabs
            String keyOut = keyInStrArr[0];              // the song name is the first field
            context.write(new Text(keyOut), valueOut);
        }
    }

    public static class MusicReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        private IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);     // write the count to the HDFS output file
            map.put(key.toString(), sum);   // also keep the count in the in-memory map
        }
    }

    public static HashMap<String, Integer> main() throws Exception {
        Configuration conf = new Configuration();
        conf.addResource("core-site.xml");   // load the project's HDFS configuration
        conf.addResource("mapred-site.xml"); // load the project's MapReduce configuration

        // Instantiate the job
        Job job = Job.getInstance(conf, "music_count");
        // Set the jar class
        job.setJarByClass(MusicCount.class);
        // Set the mapper
        job.setMapperClass(MusicMapper.class);
        // Use the reducer as a combiner to cut down the data transferred between map and reduce tasks
        job.setCombinerClass(MusicReducer.class);
        // Set the reducer
        job.setReducerClass(MusicReducer.class);
        // Set the output key type
        job.setOutputKeyClass(Text.class);
        // Set the output value type
        job.setOutputValueClass(IntWritable.class);

        // Input file paths
        FileInputFormat.addInputPath(job, new Path("hdfs://192.168.137.131:9000/music/music1.txt"));
        FileInputFormat.addInputPath(job, new Path("hdfs://192.168.137.131:9000/music/music2.txt"));
        FileInputFormat.addInputPath(job, new Path("hdfs://192.168.137.131:9000/music/music3.txt"));
        FileInputFormat.addInputPath(job, new Path("hdfs://192.168.137.131:9000/music/music4.txt"));

        // Output path: delete it first if it already exists
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("hdfs://192.168.137.131:9000/musicout");
        if (fs.exists(path)) {
            fs.delete(path, true);
        }
        FileOutputFormat.setOutputPath(job, new Path("hdfs://192.168.137.131:9000/musicout"));

        if (job.waitForCompletion(true)) {
            return map;
        } else {
            return null;
        }
    }
}
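Each input line is expected to be tab separated with the song name as its first field, since MusicMapper splits on \t and keys on element 0. The class below is a hypothetical standalone entry point (not part of the original project) for running the job outside the web application and printing the returned map; it assumes core-site.xml and mapred-site.xml are on the classpath and the HDFS paths above are reachable.

package com.etc.mc;

import java.util.HashMap;
import java.util.Map;

/** Hypothetical command-line driver for trying the job without the web front end. */
public class MusicCountDriver {

    public static void main(String[] args) throws Exception {
        // Runs the MapReduce job and blocks until it finishes
        HashMap<String, Integer> counts = MusicCount.main();
        if (counts == null) {
            System.err.println("music_count job failed");
            return;
        }
        // Print each song name with its play count, mirroring the HDFS output format
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + "\t" + entry.getValue());
        }
    }
}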
CountServlet.java, the servlet that returns the statistics to the client as JSON:

package com.etc.action;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.alibaba.fastjson.JSON;
import com.etc.mc.MusicCount;

/** Provides the play-count data to the client as JSON. */
@WebServlet("/CountServlet")
public class CountServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // Avoid garbled characters in POST parameters
        request.setCharacterEncoding("utf-8");
        // Set the response content type
        response.setContentType("text/html");
        // Set the response encoding
        response.setCharacterEncoding("utf-8");
        // Get the output writer
        PrintWriter out = response.getWriter();

        // Build the JSON data
        HashMap<String, Integer> map = null;
        try {
            map = MusicCount.main();
        } catch (Exception e) {
            System.out.println("Failed to fetch the statistics");
        }

        // Wrap the result map in an outer map and serialize it as nested JSON
        HashMap jsonmap = new HashMap();
        jsonmap.put("mytitle", "Song play count statistics");
        jsonmap.put("mylegend", "Plays");
        jsonmap.put("prolist", map);
        String str = JSON.toJSONString(jsonmap);
        out.print(str);
        out.flush();
        out.close();
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        doGet(request, response);
    }
}
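For reference, the servlet's response is a nested JSON object with three keys: mytitle and mylegend label the chart, and prolist maps each song name to its play count. The song names and counts below are made-up placeholders; the real entries come from the first column of the HDFS input files.

{
  "mytitle": "Song play count statistics",
  "mylegend": "Plays",
  "prolist": { "songA": 12, "songB": 7, "songC": 25 }
}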
<%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%> <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>金融大數據解析</title> <!-- 引入 echarts.js --> <script src="script/echarts.min.js"></script> <!-- 引入 jquery.js --> <script src="script/jquery-1.8.3.min.js"></script> </head> <body> <!-- 爲ECharts準備一個具有大小(寬高)的Dom --> <div id="main" style="width: 600px; height: 400px;"></div> <script type="text/javascript"> //顯示柱狀圖函數 function showdata(mytitle, mylegend, xdata, ydata) { // 基於準備好的dom,初始化echarts實例 var myChart = echarts.init(document.getElementById('main')); // 指定圖表的配置項和數據 var option = { title : { text : mytitle }, tooltip : {}, legend : { data : mylegend }, xAxis : { data : xdata }, yAxis : {}, series : [ { name : '點播', type : 'bar', data : ydata } ] }; // 使用剛指定的配置項和數據顯示圖表。 myChart.setOption(option); } $(function() { var mytitle; var mylegend; var xdata=new Array(); var ydata=new Array(); $.getJSON("CountServlet", function(data) { mytitle = data.mytitle; mylegend = data.mylegend; //獲取x軸數據 $.each(data.prolist, function(i, n) { xdata.push(i); }); //獲取y軸數據 $.each(data.prolist, function(i, n) { ydata.push(n); }); //執行函數 showdata(mytitle, [ mylegend ], xdata, ydata); }); }); </script> </body> </html>
1. What is the drawback of this example? Every request for the data has to submit a job to the Hadoop cluster and wait for it to run, so performance is poor.
2. The analysis results are kept in an HDFS file and an in-memory collection, which does not scale when the result set itself is large.
3. How can it be improved? Store the analyzed data set in HBase and build a big data platform that supports both offline analysis and interactive queries.
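A minimal sketch of that HBase direction, reusing MusicMapper and swapping the reducer for a TableReducer that writes one row per song. The table name music_count, column family stats, and qualifier plays are illustrative assumptions, the table is assumed to already exist, and hbase-site.xml must be on the classpath.

package com.etc.mc;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

/** Hypothetical variant that stores play counts in HBase instead of an in-memory map. */
public class MusicCountToHBase {

    /** Writes one HBase row per song: rowkey = song name, stats:plays = count. */
    public static class HBaseMusicReducer
            extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn(Bytes.toBytes("stats"), Bytes.toBytes("plays"), Bytes.toBytes(sum));
            context.write(new ImmutableBytesWritable(Bytes.toBytes(key.toString())), put);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create(); // reads hbase-site.xml from the classpath
        Job job = Job.getInstance(conf, "music_count_hbase");
        job.setJarByClass(MusicCountToHBase.class);
        job.setMapperClass(MusicCount.MusicMapper.class); // reuse the existing mapper
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path("hdfs://192.168.137.131:9000/music/music1.txt"));
        // Configure the reducer to write into the assumed "music_count" table
        TableMapReduceUtil.initTableReducerJob("music_count", HBaseMusicReducer.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

With the counts in HBase, the servlet could read them with a simple scan instead of submitting a MapReduce job per request, which addresses the performance problem noted in point 1.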