Big Data Lab (Big Data Fundamentals Training): Hive Installation, Configuration, and Basic Usage

Training material prepared for a company; the full documentation is at http://gudaoxuri.github.io/bd-lab/

10. Hive

This topic covers the installation, configuration, and basic usage of Hive.

Basic Hive information
Official site: http://hive.apache.org/

10.1. Environment Preparation

# Switch to the workspace
cd /opt/workspaces
# Create the Hive data directory
mkdir data/hive
# Create the Hive log directory
mkdir logs/hive
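An optional quick check that the directories were created as expected:

# both directories should exist under /opt/workspaces
ls -ld data/hive logs/hive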

10.2. Installation

wget http://mirrors.cnnic.cn/apache/hive/hive-1.2.1/apache-hive-1.2.1-bin.tar.gz
tar -zxf apache-hive-1.2.1-bin.tar.gz
rm -rf apache-hive-1.2.1-bin.tar.gz
mv apache-hive-1.2.1-bin ./frameworks/hive
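An optional sanity check that the distribution unpacked into the expected layout (bin/, conf/, lib/):

# list the unpacked Hive distribution
ls ./frameworks/hive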

10.3. Configuration (Standalone)

vi ~/.profile
export HIVE_HOME=/opt/workspaces/frameworks/hive
export PATH=$PATH:$HIVE_HOME/bin
source ~/.profile
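To confirm the variables took effect in the current shell:

# HIVE_HOME should resolve and the hive launcher should be on the PATH
echo $HIVE_HOME
which hive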
Configure MySQL
mysql -u root -p
create database hive;
grant all on *.* to 'hive'@'%' identified by 'hive';  # account hive, password hive, can connect from any host (%)
flush privileges;
exit;
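Optionally, verify that the new account can reach the database. This sketch assumes a MySQL client is available on this machine and uses the same <Docker Host IP> placeholder as hive-site.xml below:

# should list the 'hive' database without errors
mysql -h <Docker Host IP> -u hive -phive -e 'show databases;'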
Create the HDFS directories
./frameworks/hadoop/bin/hadoop fs -mkdir /tmp
./frameworks/hadoop/bin/hadoop fs -mkdir -p /user/hive/warehouse
./frameworks/hadoop/bin/hadoop fs -chmod 777 /tmp
./frameworks/hadoop/bin/hadoop fs -chmod 777 /user/hive/warehouse
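The directories and their permissions can be confirmed with a listing:

# /tmp and /user/hive/warehouse should show permissions drwxrwxrwx
./frameworks/hadoop/bin/hadoop fs -ls /
./frameworks/hadoop/bin/hadoop fs -ls /user/hive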
Edit the Hive configuration
cat>./frameworks/hive/conf/hive-env.sh<<EOF
export HADOOP_HEAPSIZE=1024
HADOOP_HOME=/opt/workspaces/frameworks/hadoop
export HIVE_CONF_DIR=/opt/workspaces/frameworks/hive/conf
export HIVE_AUX_JARS_PATH=/opt/workspaces/frameworks/hive/lib
EOF
cat>./frameworks/hive/conf/hive-site.xml<<EOF
<configuration>
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://<Docker Host IP>:3306/hive</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>hive</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>hive</value>
    </property>
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://bd:9083</value>
    </property>
    <property>
        <name>hive.exec.local.scratchdir</name>
        <value>/opt/workspaces/data/hive</value>
    </property>
    <property>
        <name>hive.downloaded.resources.dir</name>
        <value>/opt/workspaces/data/hive</value>
    </property>
    <property>
        <name>hive.querylog.location</name>
        <value>/opt/workspaces/logs/hive</value>
    </property>
    <property>
        <name>hive.server2.logging.operation.log.location</name>
        <value>/opt/workspaces/data/hive</value>
    </property>
    <property>
        <name>hive.metastore.schema.verification</name>
        <value>false</value>
    </property>
</configuration>
EOF
cp ./frameworks/hive/conf/hive-log4j.properties.template ./frameworks/hive/conf/hive-log4j.properties
vi ./frameworks/hive/conf/hive-log4j.properties
hive.log.dir=/opt/workspaces/logs/hive
Download the MySQL JDBC driver
wget -P ./frameworks/hive/lib http://central.maven.org/maven2/mysql/mysql-connector-java/5.1.36/mysql-connector-java-5.1.36.jar
Testing shows that connector version 5.1.38 requires an SSL connection by default; this can be disabled by appending useSSL=false to the JDBC URL (jdbc:…&useSSL=false).
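With the driver in place, the metastore schema can optionally be initialized up front with schematool rather than relying on automatic creation at first start; a sketch assuming the MySQL settings above:

# create the metastore tables in the MySQL 'hive' database
./frameworks/hive/bin/schematool -dbType mysql -initSchema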

10.4. Startup

nohup ./frameworks/hive/bin/hive --service metastore >/dev/null 2>&1 &
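Because the output is discarded, a quick way to confirm the metastore is running is to check that port 9083 (hive.metastore.uris) is listening and to look at the log; a sketch assuming ss is available:

# the metastore thrift service should be listening on 9083
ss -tnl | grep 9083
# runtime logs go to the directory set in hive-log4j.properties
tail /opt/workspaces/logs/hive/hive.log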

10.5. Testing

# the table definition below uses Tab as the field delimiter,
# so the columns must be separated by literal Tab characters
printf 'tom\t20\njack\t24\nnestor\t29\n' > test_person.txt
./frameworks/hadoop/bin/hdfs dfs -mkdir /tmp/hivetest/
./frameworks/hadoop/bin/hdfs dfs -put ./test_person.txt /tmp/hivetest/
./frameworks/hive/bin/hive
CREATE EXTERNAL TABLE test_person (name STRING, age INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' LOCATION '/tmp/hivetest';
select * from test_person;
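After leaving the Hive shell (quit;), the same table can also be queried non-interactively; the aggregate below additionally forces a MapReduce job, exercising the Hadoop side (a sketch, assuming the table created above):

# row count and average age over the external table
./frameworks/hive/bin/hive -e "SELECT count(*), avg(age) FROM test_person;"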