hive的jdbc使用

①新建maven項目,加載依賴包

 在pom.xml中添加依賴

      <dependency>

                          <groupId>jdk.tools</groupId>

                          <artifactId>jdk.tools</artifactId>

                          <version>1.8</version>

                          <scope>system</scope>

                          <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>

                  </dependency>

                  <dependency>

                          <groupId>junit</groupId>

                          <artifactId>junit</artifactId>

                          <version>3.8.1</version>

                          <scope>test</scope>

                  </dependency>

                  <dependency>

                          <groupId>org.apache.hive</groupId>

                          <artifactId>hive-exec</artifactId>

                          <version>2.1.1</version>

                  </dependency>

 

                  <dependency>

                          <groupId>org.apache.hive</groupId>

                          <artifactId>hive-jdbc</artifactId>

                          <version>2.1.1</version>

                  </dependency>

                  <dependency>

                          <groupId>org.apache.hadoop</groupId>

                          <artifactId>hadoop-common</artifactId>

                          <version>2.6.1</version>

                  </dependency>

②啓動hive的service,啓動集羣

(hive1.2.1版本之後須要使用hiveserver2啓動)

hive --service hiveserver2 --hiveconf hive.server2.thrift.port=11111(開啓服務並設置端口號)

③配置core-site.xml

<property>

      <name>hadoop.proxyuser.neworigin.groups</name>

      <value>*</value>

      <description>Allow the superuser neworigin to impersonate members of any group</description>

 </property>

 <property>

      <name>hadoop.proxyuser.neworigin.hosts</name>

      <value>*</value>

      <description>The superuser neworigin can connect from any host to impersonate a user</description>

  </property>

④編寫java代碼

package com.neworigin.HiveTest1;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class JDBCUtil {
static String DriverName="org.apache.hive.jdbc.HiveDriver";
static String url="jdbc:hive2://s100:11111/myhive";
static String user="neworigin";
static String pass="123";
//建立鏈接
public static Connection getConn() throws Exception{
    Class.forName(DriverName);
    Connection conn = DriverManager.getConnection(url,user,pass);
    return conn;
}
//建立命令
public static Statement getStmt(Connection conn) throws SQLException{
    return conn.createStatement();
}
public void closeFunc(Connection conn,Statement stmt) throws SQLException
{
    stmt.close();
    conn.close();
    }
}
package com.neworigin.HiveTest1;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

/**
 * Tutorial driver: runs one SELECT and one CREATE TABLE against Hive via JDBC.
 *
 * <p>Fixes over the original: all JDBC resources are closed via
 * try-with-resources (the original leaked the connection, statement and
 * result set), and DDL success is no longer tested with the return value of
 * {@link Statement#execute(String)} — that method returns {@code true} only
 * when the first result is a {@code ResultSet}, so for CREATE TABLE it
 * returns {@code false} and the original {@code if (b)} never reported
 * success.
 */
public class JDBCTest {
public static void main(String[] args) throws Exception {
    // try-with-resources guarantees conn/stmt are closed even on exception
    try (Connection conn = JDBCUtil.getConn();
         Statement stmt = JDBCUtil.getStmt(conn)) {
        String sql="select * from myhive.employee";
        String sql2="create table jdbctest(id int,name string)";
        // Print every row of the SELECT, columns separated by spaces.
        try (ResultSet set = stmt.executeQuery(sql)) {
            ResultSetMetaData meta = set.getMetaData(); // column metadata
            while(set.next())
            {
                // JDBC columns are 1-based
                for(int i=1;i<=meta.getColumnCount();i++)
                {
                    System.out.print(set.getString(i)+" ");
                }
                System.out.println();
            }
        }
        System.out.println("第一條sql語句執行完畢");
        // Reaching the next line without an SQLException means the DDL ran.
        stmt.execute(sql2);
        System.out.println("成功");
    }
}
}
相關文章
相關標籤/搜索