Hive notes

Hive JOIN: http://blog.csdn.net/yfkiss/article/details/8073608

 

Hive resources:

For each record, the map phase emits key/value pairs; after shuffle and sort, the grouped key/value pairs are handed to reduce, which emits the final result.

https://skydrive.live.com/?mkt=zh-CN#!/view.aspx?cid=D04547F5707AF6F9&resid=D04547F5707AF6F9%21107&app=PowerPoint
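The slides linked above walk through this flow. As a rough, hypothetical sketch (not taken from the slides, and not the plan Hive actually generates), the hand-written Hadoop MapReduce job below does what a simple Hive GROUP BY count query boils down to: the mapper turns each record into a key/value pair, the framework shuffles and sorts by key, and the reducer aggregates the values per key. It assumes the Hadoop 2 mapreduce API and ctrl-A separated input; the class names (GroupCountSketch, GroupMapper, GroupReducer) are invented for the example.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class GroupCountSketch {

  // Map phase: emit (groupKey, 1) for every input record.
  public static class GroupMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
    private static final LongWritable ONE = new LongWritable(1);

    @Override
    protected void map(LongWritable offset, Text record, Context ctx)
        throws IOException, InterruptedException {
      // Assume the grouping column is the first ctrl-A separated field.
      String groupKey = record.toString().split("\u0001")[0];
      ctx.write(new Text(groupKey), ONE);
    }
  }

  // Reduce phase: after shuffle/sort, all values for one key arrive together; sum them.
  public static class GroupReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
    @Override
    protected void reduce(Text key, Iterable<LongWritable> counts, Context ctx)
        throws IOException, InterruptedException {
      long sum = 0;
      for (LongWritable c : counts) {
        sum += c.get();
      }
      ctx.write(key, new LongWritable(sum));
    }
  }

  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "group-count-sketch");
    job.setJarByClass(GroupCountSketch.class);
    job.setMapperClass(GroupMapper.class);
    job.setReducerClass(GroupReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}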

 

RCFile: http://www.csdn.net/article/2011-04-29/296900

 

http://www.slideshare.net/OReillyStrata/large-scale-etl-with-hadoop

 

Hive DDL: https://cwiki.apache.org/Hive/languagemanual-ddl.html

 

Drop a database that still contains tables:

DROP DATABASE IF EXISTS db1 CASCADE;

Drop an empty database:

DROP DATABASE IF EXISTS db1;

Start the Hive server:

hive --service hiveserver 

The command above applies to Hive 0.9.

For Hive 0.11, use the following command instead:

hive --service hiveserver2    (the protocol is different)
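Because the protocol changed, the client side changes with it: HiveServer and HiveServer2 use different JDBC driver classes and URL schemes. A minimal sketch of the difference, assuming both driver jars are on the classpath and a server on localhost:10000 as in the example below (the class name HiveConnectionSketch is invented for illustration):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class HiveConnectionSketch {
  public static void main(String[] args) throws ClassNotFoundException, SQLException {
    // HiveServer (hive --service hiveserver, Hive 0.9): original Thrift service.
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection hs1 = DriverManager.getConnection(
        "jdbc:hive://localhost:10000/default", "", "");
    hs1.close();

    // HiveServer2 (hive --service hiveserver2, Hive 0.11): new Thrift protocol,
    // so the driver class and the URL scheme change as well.
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Connection hs2 = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "", "");
    hs2.close();
  }
}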

Connecting to Hive over JDBC:

https://cwiki.apache.org/Hive/hiveclient.html

import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;

public class HiveJdbcClient {
  // JDBC driver for the original HiveServer; HiveServer2 uses org.apache.hive.jdbc.HiveDriver instead.
  private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

  /**
   * @param args
   * @throws SQLException
   */
  public static void main(String[] args) throws SQLException {
    try {
      Class.forName(driverName);
    } catch (ClassNotFoundException e) {
      e.printStackTrace();
      System.exit(1);
    }

    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();

    // String tableName = "testHiveDriverTable";
    // stmt.executeQuery("drop table " + tableName);
    // ResultSet res = stmt.executeQuery("create table " + tableName + " (key int, value string)");

    // show tables
    // String sql = "show tables '" + tableName + "'";
    // System.out.println("Running: " + sql);
    // res = stmt.executeQuery(sql);
    // if (res.next()) {
    //   System.out.println(res.getString(1));
    // }

    // Switch to the target database. The old HiveServer driver routes even
    // statements that return no rows through executeQuery.
    stmt.executeQuery("use etl_sales_db");

    // describe table
    // sql = "describe " + tableName;
    // System.out.println("Running: " + sql);
    // res = stmt.executeQuery(sql);
    // while (res.next()) {
    //   System.out.println(res.getString(1) + "\t" + res.getString(2));
    // }

    // load data into table
    // NOTE: filepath has to be local to the hive server
    // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
    // String filepath = "/tmp/a.txt";
    // sql = "load data local inpath '" + filepath + "' into table " + tableName;
    // System.out.println("Running: " + sql);
    // res = stmt.executeQuery(sql);

    // Run an aggregate query and print the single-row result.
    String sql = "select count(*) from item";
    System.out.println("Running: " + sql);
    ResultSet res = stmt.executeQuery(sql);
    if (res.next()) {
      System.out.println(res.getString(1));
    }

    // regular hive query
    // sql = "select count(1) from " + tableName;
    // System.out.println("Running: " + sql);
    // res = stmt.executeQuery(sql);
    // while (res.next()) {
    //   System.out.println(res.getString(1));
    // }

    res.close();
    stmt.close();
    con.close();
  }
}
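To compile and run the client, the Hive JDBC driver jar and its dependencies plus the Hadoop core jar need to be on the classpath; the hiveclient wiki page linked above shows a sample launcher setup. Against HiveServer2, the same code should work after switching to the org.apache.hive.jdbc.HiveDriver class and a jdbc:hive2:// URL, as sketched earlier.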