# --- Host preparation: disable firewalld and SELinux for the offline deploy ---
sudo systemctl stop firewalld
sudo systemctl disable firewalld
# Turn SELinux off immediately (effective until reboot).
sudo setenforce 0
# Make it permanent: edit the config so it reads the line below.
# (Original said "SELINUX=disabledhtml" — scrape garbage; the valid value is "disabled".)
sudo vi /etc/sysconfig/selinux
SELINUX=disabled
# --- Install Docker 18.06.3-ce from the offline tarball ---
sudo tar -zxf docker-18.06.3-ce.tgz -C /usr/local/
# /usr/bin is root-owned, so the copy needs sudo (original omitted it).
sudo cp /usr/local/docker/* /usr/bin/
# Data directory for the Docker daemon (referenced by --graph in docker.service).
mkdir -p /home/dockernode
sudo vi /usr/lib/systemd/system/docker.service
# systemd unit for the offline-installed Docker daemon.
[Unit]
Description=Docker Application Container Engine
Documentation=https://docs.docker.com
After=network-online.target firewalld.service
Wants=network-online.target

[Service]
Type=notify
# --graph sets the daemon's data root (deprecated alias of --data-root, still
# accepted by 18.06). Pointed at /home/dockernode to match the directory the
# install step actually creates; the original said /home/docker, which is
# never created anywhere in these notes.
ExecStart=/usr/bin/dockerd --graph /home/dockernode
ExecReload=/bin/kill -s HUP $MAINPID
LimitNOFILE=infinity
LimitNPROC=infinity
TimeoutStartSec=0
Delegate=yes
KillMode=process
Restart=on-failure
StartLimitBurst=3
StartLimitInterval=60s

[Install]
# Original said "multi-user.targetpython" — scrape garbage; valid target below.
WantedBy=multi-user.target
# Allow the current user to run docker without sudo.
# (Every line in the original carried a junk suffix — "dockermysql",
# "$USERlinux", "dockernginx", etc. — the group name is simply "docker".)
sudo groupadd docker
sudo usermod -aG docker $USER
# gpasswd -a duplicates the usermod above; kept for parity with the original notes.
sudo gpasswd -a $USER docker
# Activate the new group in this session without logging out.
newgrp docker
sudo systemctl start docker
sudo systemctl enable docker
# Copy the SQL dump into the mysql container, then import it from inside.
docker cp /home/dataexa/insight-deploy/resources/mirrors/mysql/dataexa-insight-microservice-poc.sql mysql:/
docker exec -it mysql bash
# NOTE(review): password on the command line is visible in `ps` / shell history.
mysql -uroot -pDataExa5528280
# Hyphenated identifiers MUST be backtick-quoted in MySQL — the original
# unquoted `create database dataexa-insight-microservice-poc` is a syntax error.
create database `dataexa-insight-microservice-poc`
default character set utf8 default collate utf8_general_ci;
show databases;
use `dataexa-insight-microservice-poc`;
source /dataexa-insight-microservice-poc.sql
show tables;
# The docker cp below runs on the HOST (second terminal, or exit the
# container/mysql prompt first) — it is not a mysql statement.
docker cp /home/dataexa/insight-deploy/resources/xxl/xxl-job-poc.sql mysql:/
source /xxl-job-poc.sql
鍵盤:Ctrl + p + q 三個鍵一起按,出現 read escape sequence 即可平滑退出容器
# Deploy the front-end bundle into the nginx html workspace.
# (Original line was bracket-garbled: "[unzip ... -d] /path".)
unzip /home/dataexa/insight-deploy/resources/html/platform.zip -d /home/dataexa/insight-microservice/workspace/nginx_workspace/html/
# unzip is not preinstalled on the target host — install it (and bzip2)
# from the offline RPMs first, then run the unzip command above.
sudo rpm -ivh /home/dataexa/insight-deploy/resources/tools_package/offline_installer/centos/unzip-6.0-20.el7.x86_64.rpm
sudo rpm -ivh insight-deploy/resources/tools_package/offline_installer/centos/bzip2-1.0.6-13.el7.x86_64.rpm
conda 使用的 python3.5 包由 tar.gz 變成了 zip 包,修正命令如下:
# Unpack the conda python3.5 environment (now shipped as .zip, not .tar.gz).
unzip /home/dataexa/insight-deploy/resources/conda/python/python3.5.zip -d /home/dataexa/anaconda3/envs
# Workspace directories for the python services.
mkdir -p /home/dataexa/insight-microservice/workspace/python_service_workspace/container_monitor
mkdir -p /home/dataexa/insight-microservice/workspace/python_service_workspace/dlv5
# --- Install vim from the offline bundle ---
# Fixes two defects in the original: the cp was missing its destination, and
# "sudo cd" never works — cd is a shell builtin, so it ran in a child process
# and did not change this shell's working directory.
sudo cp /home/dataexa/insight-deploy/resources/tools_package/offline_installer/ubuntu/vim.zip /usr/local/
cd /usr/local
sudo unzip vim.zip
sudo ln -s /usr/local/vim/bin/vim /usr/local/bin/vim
insight-tagging[先建立目錄:cp 之前目錄不存在]
mkdir -p /home/dataexa/insight-microservice/workspace/python_service_workspace/data_labeling
cd /home/dataexa/insight-microservice/workspace/python_service_workspace/data_labeling/insight-tagging/
# Remove Python bytecode caches. The directory is named "__pycache__", not
# "pycache" (original would match nothing). -exec handles paths with spaces
# safely, and -rf avoids a failure when no match exists.
find . -name "__pycache__" -exec rm -rf -- {} +
Hadoop 部署[修正環境變量]:將 jdk 和 hadoop 的環境變量都設置到 dataexa 用戶下
# Per-user environment for the dataexa account (add the export lines below).
vi ~/.bashrc
export JAVA_HOME=/home/dataexa/insight-deploy/resources/jdk/jdk1.8.0_231
export PATH=$JAVA_HOME/bin:$PATH
export HADOOP_HOME=/home/dataexa/insight-deploy/resources/hadoop/hadoop-2.9.0
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
# Relative path — assumes cwd is the hadoop-2.9.0 directory; TODO confirm.
vi etc/hadoop/hadoop-env.sh
export JAVA_HOME=/home/dataexa/insight-deploy/resources/jdk/jdk1.8.0_231
export PATH=$JAVA_HOME/bin:$PATH
cd /tmp
將 /tmp 下文件名中帶有 hadoop 標識的相關文件備份移動到另一個目錄下,然後重新格式化文件系統
sbin/start-yarn.sh # just start YARN
完整 hadoop 部署[已將 jdk 和 hadoop 的環境變量修改到 dataexa 用戶下]
# Unpack the JDK next to its tarball, then wire up per-user env vars.
tar xf /home/dataexa/insight-deploy/resources/jdk/jdk-8u231-linux-x64.tar.gz -C /home/dataexa/insight-deploy/resources/jdk
vi ~/.bashrc
export JAVA_HOME=/home/dataexa/insight-deploy/resources/jdk/jdk1.8.0_231
export PATH=$JAVA_HOME/bin:$PATH
export HADOOP_HOME=/home/dataexa/insight-deploy/resources/hadoop/hadoop-2.9.0
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
# Reload the environment in the current shell.
source ~/.bashrc
cd /home/dataexa/insight-deploy/resources/hadoop/hadoop-2.9.0
# Hadoop does not inherit JAVA_HOME from .bashrc reliably — set it here too.
vi etc/hadoop/hadoop-env.sh
export JAVA_HOME=/home/dataexa/insight-deploy/resources/jdk/jdk1.8.0_231
export PATH=$JAVA_HOME/bin:$PATH
# Smoke test: run the bundled grep example locally on copies of the configs.
mkdir input
cp etc/hadoop/*.xml input
hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.0.jar grep input output 'dfs[a-z.]+'
上面的job是使用hadoop自帶的樣例,在input中統計含有dfs的字符串。
cat output/*
# NOTE(review): mapping a LAN IP to "localhost" is unusual — typically this
# would map the IP to the machine's hostname; verify against cluster config.
sudo vi /etc/hosts
192.168.1.237 localhost
# Configure pseudo-distributed mode (fs.defaultFS / dfs.replication).
vi etc/hadoop/core-site.xml
vi etc/hadoop/hdfs-site.xml
# Passwordless SSH to self — required by start-dfs.sh.
ssh-keygen -t rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
# Verify key-based login works before starting the daemons.
ssh dataexa@192.168.1.237
hdfs namenode -format
sbin/start-dfs.sh
# jps should show NameNode, DataNode, SecondaryNameNode.
jps
hdfs dfs -mkdir /user/test
hdfs dfs -put etc/hadoop /user/test/input
hadoop fs -ls /user/test/input
# Same grep example as before, now reading input from HDFS.
hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.0.jar grep /user/test/input output 'dfs[a-z.]+'
hdfs dfs -cat output/*
# Copy the HDFS output directory back to the local filesystem.
bin/hdfs dfs -get output output
cat output/*
sbin/stop-dfs.sh
修改 MapReduce/YARN 設定文件
# Enable YARN as the MapReduce framework.
cp etc/hadoop/mapred-site.xml.template etc/hadoop/mapred-site.xml
vi etc/hadoop/mapred-site.xml
vi etc/hadoop/yarn-site.xml
sbin/start-yarn.sh
# run the example hadoop job on YARN
hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-2.9.0.jar grep /user/test/input output 'dfs[a-z.]+'
hdfs dfs -cat output/*
sbin/stop-yarn.sh
# Optional: job history server for the YARN web UI.
sbin/mr-jobhistory-daemon.sh start historyserver