hive_jdbc_url=jdbc:hive2://192.168.0.22:10000/default
hive.dbname=xxxxx
hive_jdbc_username=root
hive_jdbc_password=123456
#配置初始化大小、最小、最大
hive_initialSize=20
hive_minIdle=20
hive_maxActive=500
#配置獲取連接等待超時的時間(毫秒)
hive_maxWait=60000
<dependency> <groupId>com.alibaba</groupId> <artifactId>druid</artifactId> <version>1.0.26</version> </dependency>
public class HiveDataSourceUtil { private static DruidDataSource hiveDataSource = new DruidDataSource(); public static Connection conn = null; private static final Logger log = LoggerFactory.getLogger(HiveDataSourceUtil.class); public static DruidDataSource getHiveDataSource() { if(hiveDataSource.isInited()){ return hiveDataSource; } try { Properties dsProp = PropertiesUtil.getDataSourceProp(); //基本屬性 url、user、password hiveDataSource.setUrl(dsProp.getProperty("hive_jdbc_url")); hiveDataSource.setUsername(dsProp.getProperty("hive_jdbc_username")); hiveDataSource.setPassword(dsProp.getProperty("hive_jdbc_password")); //配置初始化大小、最小、最大 hiveDataSource.setInitialSize(Integer.parseInt(dsProp.getProperty("hive_initialSize"))); hiveDataSource.setMinIdle(Integer.parseInt(dsProp.getProperty("hive_minIdle"))); hiveDataSource.setMaxActive(Integer.parseInt(dsProp.getProperty("hive_maxActive"))); //配置獲取鏈接等待超時的時間 hiveDataSource.setMaxWait(Integer.parseInt(dsProp.getProperty("hive_maxWait"))); //配置間隔多久才進行一次檢測,檢測須要關閉的空閒鏈接,單位是毫秒 hiveDataSource.setTimeBetweenEvictionRunsMillis(60000); //配置一個鏈接在池中最小生存的時間,單位是毫秒 hiveDataSource.setMinEvictableIdleTimeMillis(300000); // hiveDataSource.setValidationQuery("select * from xxxx"); hiveDataSource.setTestWhileIdle(false); // hiveDataSource.setTestOnBorrow(false); // hiveDataSource.setTestOnReturn(false); //打開PSCache,而且指定每一個鏈接上PSCache的大小 hiveDataSource.setPoolPreparedStatements(true); hiveDataSource.setMaxPoolPreparedStatementPerConnectionSize(20); //配置監控統計攔截的filters // hiveDataSource.setFilters("stat"); hiveDataSource.init(); } catch (SQLException e) { e.printStackTrace(); closeHiveDataSource(); } return hiveDataSource; } /** *@Description:關閉Hive鏈接池 */ public static void closeHiveDataSource(){ if(hiveDataSource != null){ hiveDataSource.close(); } } /** * *@Description:獲取Hive鏈接 *@return */ public static Connection getHiveConn(){ try { hiveDataSource = getHiveDataSource(); conn = hiveDataSource.getConnection(); } catch (SQLException e) { 
log.error("--"+e+":獲取Hive鏈接失敗!"); } return conn; } /** *@Description:關閉Hive數據鏈接 */ public static void closeConn(){ try { if(conn != null){ conn.close(); } } catch (SQLException e) { log.error("--"+e+":關閉Hive-conn鏈接失敗!"); } } public static void main(String[] args) throws Exception { DataSource ds = HiveDataSourceUtil.getHiveDataSource(); Connection conn = ds.getConnection(); Statement stmt = null; if(conn == null){ System.out.println("null"); }else{ System.out.println("conn"); stmt = conn.createStatement(); ResultSet res = stmt.executeQuery("select * from xxxx t"); int i = 0; while(res.next()){ if(i<10){ System.out.println(res.getString(1)); i++; } } } stmt.close(); conn.close(); } }
在服務器上啟動 HiveServer2 服務(使其監聽遠程端口 10000):hive --service hiveserver2 &
PS:若對以上操作不熟悉,可以請運維人員協助。