CentOS 7.4 + Greenplum 5.7 + make

OS: CentOS 7.4
GP: gpdb-5.7.0

Greenplum (often shortened to GP) evolved from PostgreSQL; plenty of background material is available online for anyone interested.

Three machines:
node1 is the master host
node2 and node3 are the segment hosts

OS setup

# cat /etc/centos-release
CentOS Linux release 7.4.1708 (Core)
# uname -a
Linux node1 3.10.0-693.el7.x86_64 #1 SMP Tue Aug 22 21:09:27 UTC 2017 x86_64 x86_64 x86_64 GNU/Linux

Set the hostname

# vi /etc/hostname
node1

Configure the IP addresses with setup or nmtui-edit

# setup
or
# nmtui-edit

Edit /etc/hosts

vi /etc/hosts

10.0.2.7 node1-pub
10.0.2.8 node2-pub
10.0.2.9 node3-pub

192.168.56.101 node1
192.168.56.102 node2
192.168.56.103 node3

Disable the firewall

# systemctl stop firewalld.service
# systemctl disable firewalld.service
# iptables -F

Disable SELinux

# vi /etc/selinux/config
SELINUX=disabled
or (on CentOS 7, /etc/sysconfig/selinux is a symlink to the same file)
# vi /etc/sysconfig/selinux
SELINUX=disabled
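
The change in the config file only takes effect after a reboot. To check the current mode and switch to permissive immediately for the running system, the standard SELinux tools can be used:

# getenforce
# setenforce 0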

Edit /etc/sysctl.conf

#kernel.shmall = 2097152
#kernel.shmmax = 536870912 # Bytes
kernel.shmmni = 4096

fs.aio-max-nr = 1048576
fs.file-max = 68116544

kernel.sem = 250 5120000 100 20480
kernel.sysrq = 1
kernel.core_uses_pid = 1
kernel.msgmnb = 65536
kernel.msgmax = 65536
kernel.msgmni = 2048

net.ipv4.conf.default.rp_filter = 1
net.ipv4.conf.all.arp_filter = 1
net.ipv4.ip_forward = 0
net.ipv4.tcp_syncookies = 1
net.ipv4.tcp_tw_recycle=1
net.ipv4.tcp_max_syn_backlog=4096
net.ipv4.ip_local_port_range = 1025 65535
net.core.netdev_max_backlog=10000
net.core.rmem_default = 262144
net.core.rmem_max = 4194304
net.core.wmem_default = 262144
net.core.wmem_max = 1048586

vm.overcommit_memory=2
vm.overcommit_ratio = 95

Apply the changes immediately:

# sysctl -p
# lsipc
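
To spot-check individual parameters after loading, query them directly; kernel.sem and net.core.rmem_max are just two examples from the list above:

# sysctl kernel.sem net.core.rmem_max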

Edit /etc/security/limits.conf

* soft nofile 65536
* hard nofile 65536
* soft nproc 131072
* hard nproc 131072

What these settings mean:

soft nofile: soft limit on the number of open file descriptors
hard nofile: hard limit on the number of open file descriptors
soft nproc: soft limit on the number of processes a single user may run
hard nproc: hard limit on the number of processes a single user may run
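
The limits only apply to new sessions, so after logging in again they can be verified with ulimit:

# ulimit -n
# ulimit -u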

Edit /etc/rc.local

# vi /etc/rc.local
# disable transparent huge pages
if test -f /sys/kernel/mm/transparent_hugepage/enabled; then
   echo never > /sys/kernel/mm/transparent_hugepage/enabled
fi
if test -f /sys/kernel/mm/transparent_hugepage/defrag; then
   echo never > /sys/kernel/mm/transparent_hugepage/defrag
fi
# increase the block device read-ahead
/usr/sbin/blockdev --setra 16384 /dev/sda
# set the I/O scheduler to deadline
echo deadline > /sys/block/sda/queue/scheduler

# chmod u+x /etc/rc.d/rc.local
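
After a reboot (or after running the script once by hand) the settings can be checked; sda is the device name used in the script above:

# cat /sys/kernel/mm/transparent_hugepage/enabled
# /usr/sbin/blockdev --getra /dev/sda
# cat /sys/block/sda/queue/scheduler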

Alternatively, append the options to the GRUB_CMDLINE_LINUX line in /etc/default/grub and regenerate the GRUB config:

# vi /etc/default/grub
ipv6.disable=1 numa=off elevator=deadline transparent_hugepage=never
# grub2-mkconfig -o /boot/grub2/grub.cfg

Create the greenplum group

# groupadd -g 10000 gpadmin

Create the greenplum user:

# useradd -u 10000 -g gpadmin gpadmin
# usermod -G root gpadmin
# passwd gpadmin

Create the greenplum directories

# mkdir -p /usr/local/greenplum-db
# chown -R gpadmin:gpadmin /usr/local/greenplum-db
# mkdir -p /u01/greenplum-data/
# chown -R gpadmin:gpadmin /u01
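
The same setup is needed on every node. If root SSH access to the other hosts is already available, the data directories, for example, can be created remotely in one loop (user and group creation can be handled the same way); hostnames are the ones defined in /etc/hosts above:

# for h in node2 node3; do ssh $h "mkdir -p /u01/greenplum-data && chown -R gpadmin:gpadmin /u01"; done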

Build and install from source

https://github.com/greenplum-db/gpdb/releases
https://github.com/greenplum-db/gpdb/tree/5.7.0

Install the dependency packages (there are quite a few):

# yum install centos-release-scl epel-release dh-autoreconf devtoolset-6-toolchain
# yum install git wget cmake3 rsync coreutils glib2 lrzsz sysstat e4fsprogs xfsprogs ntp zlib zlib-devel openssl openssl-libs openssl-devel pam pam-devel tcl-devel \
 smartmontools OpenIPMI-tools openldap openldap-devel logrotate libcurl-devel htop perl-Env libffi-devel libtool libaio ed net-tools \
 gcc gcc-c++ glibc-static make curl-devel bzip2-devel psutils psutils-perl liblockfile liblockfile-devel libevent libevent-devel vim-common vim-enhanced \
 perl perl-devel perl-ExtUtils-Embed  readline readline-devel apr apr-devel apr-util apr-util-devel libxml2 libxml2-devel \
 libxslt libxslt-devel bison bison-devel bison-runtime flex flex-devel isomd5sum isomd5sum-devel libyaml libyaml-devel

# yum install python python-devel python-isomd5sum python-setuptools python-py
# yum install python-lockfile 
# yum install python-paramiko 

# vi /etc/ld.so.conf
include ld.so.conf.d/*.conf
/usr/local/lib
/usr/local/lib64
# ldconfig

Install a few required tools with pip.
For installing pip itself, see https://pip.pypa.io/en/stable/installing/

# curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
# python get-pip.py

# pip install setuptools
# pip install --upgrade setuptools

# pip install epydoc
# pip install psi
# pip install psutil
# pip install lockfile
# pip install paramiko
# pip install gssapi
# pip install conan
Cannot uninstall 'enum34'. It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall.

# yum list installed |grep enum34
# yum remove python-enum34.noarch
Running transaction
  Erasing    : python-paramiko-doc-2.1.1-4.el7.noarch                                 1/4 
  Erasing    : python-paramiko-2.1.1-4.el7.noarch                                     2/4 
  Erasing    : python2-cryptography-1.7.2-1.el7_4.1.x86_64                            3/4 
  Erasing    : python-enum34-1.0.4-1.el7.noarch                                       4/4 
  Verifying  : python-paramiko-doc-2.1.1-4.el7.noarch                                 1/4 
  Verifying  : python-enum34-1.0.4-1.el7.noarch                                       2/4 
  Verifying  : python2-cryptography-1.7.2-1.el7_4.1.x86_64                            3/4 
  Verifying  : python-paramiko-2.1.1-4.el7.noarch                                     4/4
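
As an alternative to removing the RPM-packaged enum34 as above, pip's --ignore-installed flag can sidestep the "Cannot uninstall" error (not tested in this walkthrough):

# pip install --ignore-installed conan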

Install cmake (all nodes); skip this step if cmake3 was already installed via yum.
Reference:
https://cmake.org/download/

# cd /root
# mkdir cmake
# cd cmake
# rz
# ./cmake-3.11.0-Linux-x86_64.sh
# ln -s /root/cmake/bin/cmake /usr/bin/cmake

Install gp-xerces (all nodes)
References:
https://github.com/greenplum-db/gp-xerces
https://github.com/greenplum-db/gp-xerces.git

# cd /root
# git clone https://github.com/greenplum-db/gp-xerces.git
# cd gp-xerces
# mkdir build
# cd ./build
# ../configure --prefix=/usr/local
# make
# make install
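
gp-xerces installs into /usr/local/lib, so it is worth refreshing the linker cache and confirming the library is visible (the library it ships is xerces-c):

# ldconfig
# ldconfig -p | grep -i xerces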

Install re2c (all nodes)
References:
http://re2c.org/install/install.html
https://github.com/skvadrik/re2c
https://github.com/skvadrik/re2c.git

# cd /root
# git clone https://github.com/skvadrik/re2c.git
# cd ./re2c/re2c
# ./autogen.sh
# ./configure --prefix=/usr/local
# make
# make install

Install ninja (all nodes)
References:
https://ninja-build.org/
https://github.com/ninja-build/ninja
https://github.com/ninja-build/ninja.git

# cd /root
# git clone https://github.com/ninja-build/ninja.git
# cd ninja
# ./configure.py --bootstrap
# ln -s /root/ninja/ninja /usr/bin/ninja
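
A quick sanity check that both build tools are on the PATH before building gporca:

# cmake --version
# ninja --version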

Install gporca
A few words on this: GPORCA is Greenplum's new-generation query optimizer and brings a large performance improvement.
The legacy planner was built early on for a single-node database and is aimed mainly at OLTP workloads; when ORCA is not installed, Greenplum falls back to the legacy planner.
Why two optimizers? The reasons are historical: today's Greenplum is an MPP system whose main workload is OLAP, and although the legacy planner was adapted for that, its architecture made maintenance and adding new features increasingly difficult, which is why GPORCA was created.

Option 1: install gporca manually on its own (all nodes)
References:
https://github.com/greenplum-db/gporca
https://github.com/greenplum-db/gporca.git

# cd /root
# git clone https://github.com/greenplum-db/gporca.git
# cd gporca
# cmake -GNinja -H. -Bbuild
# ninja install -C build
--
--
-- Installing: /usr/local/include/gpopt/version.h

After the install completes, go into the gporca/build directory and run ctest to make sure 100% of the tests pass.
# cd /root/gporca/build/
# /root/cmake/bin/ctest
100% tests passed, 0 tests failed out of 157

Total Test time (real) = 194.05 sec

Option 2: automatic installation; in the end it likewise just copies the files into /usr/local/include/ and /usr/local/lib/
(all nodes)
Reference: https://github.com/greenplum-db/gpdb/tree/5.7.0

# su - gpadmin
$ echo 'source scl_source enable devtoolset-6' >> ~/.bashrc
$ cd /tmp
$ rz gpdb-5.7.0.zip
$ unzip ./gpdb-5.7.0.zip
$ cd gpdb-5.7.0
$ cd depends
$ ./configure
$ make
===================================================================
Orca can now be installed on the local system using "make install"
and be used as any normal system library

If you'd rather compile GPDB using ORCA in it's current location and then
install ORCA into the gpdb installation location then first run the top
level configure as follows:
LD_LIBRARY_PATH=/tmp/gpdb-5.7.0/depends/build/lib ./configure \ 
    --with-libraries=/tmp/gpdb-5.7.0/depends/build/lib \ 
    --with-includes=/tmp/gpdb-5.7.0/depends/build/include 

Then run "make".
Then run "LD_LIBRARY_PATH=/tmp/gpdb-5.7.0/depends/build/lib make install"

These steps should work on both MacOS and Linux

# make install_local
===================================================================
Orca can now be installed on the local system using "make install"
and be used as any normal system library

If you'd rather compile GPDB using ORCA in it's current location and then
install ORCA into the gpdb installation location then first run the top
level configure as follows:
LD_LIBRARY_PATH=/tmp/gpdb-5.7.0/depends/build/lib ./configure \ 
    --with-libraries=/tmp/gpdb-5.7.0/depends/build/lib \ 
    --with-includes=/tmp/gpdb-5.7.0/depends/build/include 

Then run "make".
Then run "LD_LIBRARY_PATH=/tmp/gpdb-5.7.0/depends/build/lib make install"

These steps should work on both MacOS and Linux
/bin/mkdir -p /usr/local
cp -R build/* /usr/local

The last two commands simply copy the build output into the system paths:
# /bin/mkdir -p /usr/local
# cp -R build/* /usr/local

Later on, gpdb's configure reported the following error:
checking Checking ORCA version... configure: error: Your ORCA version is expected to be 2.55.XXX
The installed gporca turned out to be 2.55.13; after rebuilding gporca with the first (manual) method, which produced 2.55.20, the error went away.
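
To see which ORCA version is currently installed, inspect the version header that gporca installs (the exact macro name may differ between releases):

# grep -i version /usr/local/include/gpopt/version.h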

Clean up before reinstalling gporca:

rm -rf /usr/local/include/naucrates
rm -rf /usr/local/include/gpdbcost
rm -rf /usr/local/include/gpopt
rm -rf /usr/local/include/gpos
rm -rf /usr/local/lib/libnaucrates.so*
rm -rf /usr/local/lib/libgpdbcost.so*
rm -rf /usr/local/lib/libgpopt.so*
rm -rf /usr/local/lib/libgpos.so*

Build Greenplum

# su - gpadmin
$ cd /tmp
$ unzip ./gpdb-5.7.0.zip
finishing deferred symbolic links:
  gpdb-5.7.0/concourse/tasks/compile_gpdb_open_source.yml -> compile_gpdb_open_source_centos.yml

$ cd /tmp/gpdb-5.7.0

$ ./configure --prefix=/usr/local/greenplum-db --enable-mapreduce --with-perl --with-python --with-libxml --with-gssapi --enable-orca --with-includes=/usr/local/include/ --with-libraries=/usr/local/lib/

If the gporca optimizer was not built, add the --disable-orca option instead, as follows:
$ ./configure --prefix=/usr/local/greenplum-db --enable-mapreduce --with-perl --with-python --with-libxml --with-gssapi --disable-orca

A fairly complete set of configure options, for reference:
--with-gssapi --with-pgport=5432 --with-libedit-preferred --with-perl --with-python --with-openssl
--with-pam --with-krb5 --with-ldap --with-libxml --enable-cassert --enable-debug --enable-testutils --enable-debugbreak --enable-depend


The following error came up along the way:
./configure: line 11922: #include: command not found

When configure needs to be re-run (for example after the error above, or when rebuilding several times), the build scripts can be regenerated with autoreconf:
# cd /tmp/gpdb-5.7.0
# autoreconf -ivf

$ make -j4
$ make install

$ cd /usr/local/greenplum-db
$ ls -l
total 24
drwxrwxr-x 7 gpadmin gpadmin 4096 Apr 17 13:54 bin
drwxrwxr-x 3 gpadmin gpadmin   24 Apr 17 13:54 doc
drwxrwxr-x 3 gpadmin gpadmin   22 Apr 17 13:54 docs
drwxrwxr-x 2 gpadmin gpadmin   25 Apr 17 13:54 etc
-rw-rw-r-- 1 gpadmin gpadmin  698 Apr 17 13:54 greenplum_path.sh
drwxrwxr-x 4 gpadmin gpadmin 4096 Apr 17 13:54 include
drwxrwxr-x 5 gpadmin gpadmin 4096 Apr 17 13:54 lib
drwxrwxr-x 2 gpadmin gpadmin 4096 Apr 17 13:54 sbin
drwxrwxr-x 4 gpadmin gpadmin   41 Apr 17 13:54 share
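
A quick check that the freshly built binaries work (--version is standard PostgreSQL/Greenplum behaviour):

$ /usr/local/greenplum-db/bin/postgres --version
$ /usr/local/greenplum-db/bin/pg_config --version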

Set the user environment variables

# vi /home/gpadmin/.bashrc
source /usr/local/greenplum-db/greenplum_path.sh
# vi /home/gpadmin/.bash_profile
source /usr/local/greenplum-db/greenplum_path.sh

After editing, remember to source the files so the changes take effect immediately.

# source /home/gpadmin/.bashrc
# source /home/gpadmin/.bash_profile
# echo $PATH

At this point Greenplum is fully installed on node1.

The Greenplum binaries need to be installed on every host.
On node1, create the all_host and all_segment files:

# source /usr/local/greenplum-db/greenplum_path.sh
# cd $GPHOME
# vi all_host
node1
node2
node3
# vi all_segment
node2
node3

Set up trust between the nodes from node1; the root password will be required:

# source /usr/local/greenplum-db/greenplum_path.sh
# cd $GPHOME/bin
# ./gpssh-exkeys -f $GPHOME/all_host
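
Once the key exchange succeeds, gpssh should reach every host without prompting for a password:

# ./gpssh -f $GPHOME/all_host -e hostname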

Run the batch installation from node1:

# source /usr/local/greenplum-db/greenplum_path.sh
# cd $GPHOME/bin
# ./gpseginstall -f $GPHOME/all_segment -u gpadmin -p rootroot

Check the result of the batch installation from node1:

# source /usr/local/greenplum-db/greenplum_path.sh
# cd $GPHOME/bin
# ./gpssh -f $GPHOME/all_segment -e ls -l $GPHOME

Check the data directory on node1, node2, and node3:

# ls -l / |grep -i u01 ;ls -l /u01/ ;

Time synchronization: every node must keep its clock in step with the master.
Use NTP to sync against a standard time source as well as within the cluster:

# yum install ntp
# systemctl start ntpd
# systemctl enable ntpd
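
One common arrangement (an example, not part of the original setup) is to let node1 follow public NTP servers and point the segment hosts at node1. On node2 and node3:

# vi /etc/ntp.conf
server node1 prefer
# systemctl restart ntpd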

Or use gpssh to compare and synchronize the clocks across hosts:

$ gpssh -f all_host -v date
$ gpssh -f all_host -v ntpd

Run gpcheck to verify the OS settings on the newly configured segment hosts:

# gpcheck -f /usr/local/greenplum-db/all_host

Hardware performance check:

# gpcheckperf -f /usr/local/greenplum-db/all_host -d /tmp -d /home/gpadmin/ -v -r ds

Network performance check:

# gpchecknet -f /usr/local/greenplum-db/all_host -d /tmp
# gpchecknet -f /usr/local/greenplum-db/all_host -r N -d /tmp

Initialization

The GP initialization configuration file

$ cp /usr/local/greenplum-db/docs/cli_help/gpconfigs/gpinitsystem_config /usr/local/greenplum-db/
$ chmod 775 ./gpinitsystem_config
$ vi /usr/local/greenplum-db/gpinitsystem_config
$ egrep -v "(^$|^#)" ./gpinitsystem_config

ARRAY_NAME="test Greenplum Data Platform"
SEG_PREFIX=gpseg
PORT_BASE=40000
declare -a DATA_DIRECTORY=(/u01/greenplum-data)
MASTER_HOSTNAME=node1
MASTER_DIRECTORY=/u01/greenplum-data
MASTER_PORT=5432
TRUSTED_SHELL=ssh
CHECK_POINT_SEGMENTS=8
ENCODING=UNICODE
DATABASE_NAME=peiybdb
MACHINE_LIST_FILE=/usr/local/greenplum-db/all_segment

A brief explanation:
ARRAY_NAME: name of the array; default "Greenplum Data Platform".
SEG_PREFIX: prefix for the segment data directories; default gpseg.
PORT_BASE: starting port for the segments; ports are assigned counting up from here; default 40000.
DATA_DIRECTORY: data directories for the primary segments; each entry creates one primary segment instance on every segment host, so list one directory per primary you want per host.
MASTER_HOSTNAME: hostname of the master.
MASTER_DIRECTORY: data directory of the master.
MASTER_PORT: port of the master; default 5432.
TRUSTED_SHELL: trust mechanism between the nodes; default ssh.
CHECK_POINT_SEGMENTS: number of write-ahead log (WAL) segments; the default of 8 means about 1088 MB of WAL space is allocated for each segment or master instance on a host.
ENCODING=UNICODE: initial character set; default UNICODE (UTF-8).
MACHINE_LIST_FILE: file containing only the segment host addresses.
Note that every directory referenced here must already exist; they were created earlier when preparing the data storage areas.
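
For example (a hypothetical layout, not the one used here), to run two primary segments per host you would list two directories, and both must already exist on every segment host:

declare -a DATA_DIRECTORY=(/u01/greenplum-data/primary1 /u01/greenplum-data/primary2)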

Run the GP initialization utility

$ cd $GPHOME/bin
$ gpinitsystem -c /usr/local/greenplum-db/gpinitsystem_config -h /usr/local/greenplum-db/all_segment
20180417:17:41:25:005291 gpinitsystem:node1:gpadmin-[INFO]:-Checking configuration parameters, please wait...
20180417:17:41:25:005291 gpinitsystem:node1:gpadmin-[INFO]:-Reading Greenplum configuration file /usr/local/greenplum-db/gpinitsystem_config
20180417:17:41:25:005291 gpinitsystem:node1:gpadmin-[INFO]:-Locale has not been set in /usr/local/greenplum-db/gpinitsystem_config, will set to default value
20180417:17:41:25:005291 gpinitsystem:node1:gpadmin-[INFO]:-Locale set to en_US.utf8
20180417:17:41:25:005291 gpinitsystem:node1:gpadmin-[INFO]:-MASTER_MAX_CONNECT not set, will set to default value 250
20180417:17:41:25:005291 gpinitsystem:node1:gpadmin-[INFO]:-Checking configuration parameters, Completed
20180417:17:41:25:005291 gpinitsystem:node1:gpadmin-[INFO]:-Commencing multi-home checks, please wait...
...
20180417:17:41:27:005291 gpinitsystem:node1:gpadmin-[INFO]:-Configuring build for standard array
20180417:17:41:27:005291 gpinitsystem:node1:gpadmin-[INFO]:-Commencing multi-home checks, Completed
20180417:17:41:27:005291 gpinitsystem:node1:gpadmin-[INFO]:-Building primary segment instance array, please wait...
...
20180417:17:41:29:005291 gpinitsystem:node1:gpadmin-[INFO]:-Checking Master host
20180417:17:41:29:005291 gpinitsystem:node1:gpadmin-[INFO]:-Checking new segment hosts, please wait...
...
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Checking new segment hosts, Completed
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Greenplum Database Creation Parameters
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:---------------------------------------
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master Configuration
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:---------------------------------------
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master instance name       = test Greenplum Data Platform
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master hostname            = node1
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master port                = 5432
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master instance dir        = /u01/greenplum-data/gpseg-1
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master LOCALE              = en_US.utf8
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Greenplum segment prefix   = gpseg
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master Database            = peiybdb
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master connections         = 250
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master buffers             = 128000kB
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Segment connections        = 750
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Segment buffers            = 128000kB
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Checkpoint segments        = 8
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Encoding                   = UNICODE
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Postgres param file        = Off
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Initdb to be used          = /usr/local/greenplum-db/bin/initdb
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-GP_LIBRARY_PATH is         = /usr/local/greenplum-db/lib
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-HEAP_CHECKSUM is           = on
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Ulimit check               = Passed
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Array host connect type    = Single hostname per node
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master IP address [1]      = ::1
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master IP address [2]      = 10.0.2.7
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master IP address [3]      = 192.168.122.1
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master IP address [4]      = 192.168.56.101
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master IP address [5]      = fe80::a00:27ff:fe6e:8656
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Master IP address [6]      = fe80::a00:27ff:fef3:ba27
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Standby Master             = Not Configured
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Primary segment #          = 1
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Total Database segments    = 3
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Trusted shell              = ssh
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Number segment hosts       = 3
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Mirroring config           = OFF
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:----------------------------------------
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-Greenplum Primary Segment Configuration
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:----------------------------------------
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-node1   /u01/greenplum-data/gpseg0  40000   2 0
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-node2   /u01/greenplum-data/gpseg1  40000   3 1
20180417:17:41:39:005291 gpinitsystem:node1:gpadmin-[INFO]:-node3   /u01/greenplum-data/gpseg2  40000   4 2

Continue with Greenplum creation Yy|Nn (default=N):
> y
20180417:17:41:43:005291 gpinitsystem:node1:gpadmin-[INFO]:-Building the Master instance database, please wait...
20180417:17:41:46:005291 gpinitsystem:node1:gpadmin-[INFO]:-Starting the Master in admin mode
20180417:17:41:51:005291 gpinitsystem:node1:gpadmin-[INFO]:-Commencing parallel build of primary segment instances
20180417:17:41:51:005291 gpinitsystem:node1:gpadmin-[INFO]:-Spawning parallel processes    batch [1], please wait...
...
20180417:17:41:51:005291 gpinitsystem:node1:gpadmin-[INFO]:-Waiting for parallel processes batch [1], please wait...
...............................
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:------------------------------------------------
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-Parallel process exit status
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:------------------------------------------------
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-Total processes marked as completed           = 3
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-Total processes marked as killed              = 0
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-Total processes marked as failed              = 0
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:------------------------------------------------
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-Deleting distributed backout files
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-Removing back out file
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-No errors generated from parallel processes
20180417:17:42:23:005291 gpinitsystem:node1:gpadmin-[INFO]:-Restarting the Greenplum instance in production mode

20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Starting gpstop with args: -a -l /home/gpadmin/gpAdminLogs -i -m -d /u01/greenplum-data/gpseg-1
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Gathering information and validating the environment...
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Obtaining Greenplum Master catalog information
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Obtaining Segment details from master...
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Greenplum Version: 'postgres (Greenplum Database) 5.0.0 build dev'
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-There are 0 connections to the database
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Commencing Master instance shutdown with mode='immediate'
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Master host=node1
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Commencing Master instance shutdown with mode=immediate
20180417:17:42:23:009523 gpstop:node1:gpadmin-[INFO]:-Master segment instance directory=/u01/greenplum-data/gpseg-1
20180417:17:42:24:009523 gpstop:node1:gpadmin-[INFO]:-Attempting forceful termination of any leftover master process
20180417:17:42:24:009523 gpstop:node1:gpadmin-[INFO]:-Terminating processes for segment /u01/greenplum-data/gpseg-1
20180417:17:42:25:009553 gpstart:node1:gpadmin-[INFO]:-Starting gpstart with args: -a -l /home/gpadmin/gpAdminLogs -d /u01/greenplum-data/gpseg-1
20180417:17:42:25:009553 gpstart:node1:gpadmin-[INFO]:-Gathering information and validating the environment...
20180417:17:42:25:009553 gpstart:node1:gpadmin-[INFO]:-Greenplum Binary Version: 'postgres (Greenplum Database) 5.0.0 build dev'
20180417:17:42:25:009553 gpstart:node1:gpadmin-[INFO]:-Greenplum Catalog Version: '301705051'
20180417:17:42:25:009553 gpstart:node1:gpadmin-[INFO]:-Starting Master instance in admin mode
20180417:17:42:26:009553 gpstart:node1:gpadmin-[INFO]:-Obtaining Greenplum Master catalog information
20180417:17:42:26:009553 gpstart:node1:gpadmin-[INFO]:-Obtaining Segment details from master...
20180417:17:42:26:009553 gpstart:node1:gpadmin-[INFO]:-Setting new master era
20180417:17:42:26:009553 gpstart:node1:gpadmin-[INFO]:-Master Started...
20180417:17:42:26:009553 gpstart:node1:gpadmin-[INFO]:-Shutting down master
20180417:17:42:27:009553 gpstart:node1:gpadmin-[INFO]:-Commencing parallel segment instance startup, please wait...
........ 
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-Process results...
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-----------------------------------------------------
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-   Successful segment starts                                            = 3
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-   Failed segment starts                                                = 0
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-   Skipped segment starts (segments are marked down in configuration)   = 0
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-----------------------------------------------------
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-Successfully started 3 of 3 segment instances 
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-----------------------------------------------------
20180417:17:42:35:009553 gpstart:node1:gpadmin-[INFO]:-Starting Master instance node1 directory /u01/greenplum-data/gpseg-1 
20180417:17:42:36:009553 gpstart:node1:gpadmin-[INFO]:-Command pg_ctl reports Master node1 instance active
20180417:17:42:36:009553 gpstart:node1:gpadmin-[INFO]:-No standby master configured.  skipping...
20180417:17:42:36:009553 gpstart:node1:gpadmin-[INFO]:-Database successfully started
20180417:17:42:36:005291 gpinitsystem:node1:gpadmin-[INFO]:-Completed restart of Greenplum instance in production mode
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-Scanning utility log file for any warning messages
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-Log file scan check passed
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-Greenplum Database instance successfully created
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-------------------------------------------------------
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-To complete the environment configuration, please 
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-update gpadmin .bashrc file with the following
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-1. Ensure that the greenplum_path.sh file is sourced
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-2. Add "export MASTER_DATA_DIRECTORY=/u01/greenplum-data/gpseg-1"
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-   to access the Greenplum scripts for this instance:
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-   or, use -d /u01/greenplum-data/gpseg-1 option for the Greenplum scripts
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-   Example gpstate -d /u01/greenplum-data/gpseg-1
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-Script log file = /home/gpadmin/gpAdminLogs/gpinitsystem_20180417.log
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-To remove instance, run gpdeletesystem utility
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-To initialize a Standby Master Segment for this Greenplum instance
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-Review options for gpinitstandby
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-------------------------------------------------------
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-The Master /u01/greenplum-data/gpseg-1/pg_hba.conf post gpinitsystem
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-has been configured to allow all hosts within this new
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-array to intercommunicate. Any hosts external to this
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-new array must be explicitly added to this file
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-Refer to the Greenplum Admin support guide which is
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-located in the /usr/local/greenplum-db/docs directory
20180417:17:42:42:005291 gpinitsystem:node1:gpadmin-[INFO]:-------------------------------------------------------

On node1, add the Greenplum master data directory variable; the final configuration looks like this:

# vi /home/gpadmin/.bashrc
source /usr/local/greenplum-db/greenplum_path.sh
export MASTER_DATA_DIRECTORY=/u01/greenplum-data/gpseg-1

# vi /home/gpadmin/.bash_profile
source /usr/local/greenplum-db/greenplum_path.sh
export MASTER_DATA_DIRECTORY=/u01/greenplum-data/gpseg-1

node2 and node3 act as segments; their final configuration looks like this:

# vi /home/gpadmin/.bashrc
source /usr/local/greenplum-db/greenplum_path.sh

# vi /home/gpadmin/.bash_profile
source /usr/local/greenplum-db/greenplum_path.sh
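
With the environment in place, the state of the cluster can be checked from node1 as gpadmin:

$ gpstate -s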

Configure access permissions

$ cd /u01/greenplum-data/gpseg-1
$ ls -l
total 60
drwx------ 6 gpadmin gpadmin 54 Apr 17 17:42 base
drwx------ 2 gpadmin gpadmin 4096 Apr 17 17:42 global
-r-------- 1 gpadmin gpadmin 109 Apr 17 17:41 gp_dbid
drwxrwxr-x 5 gpadmin gpadmin    42 Apr 17 17:41 gpperfmon
-rw-rw-r-- 1 gpadmin gpadmin 860 Apr 17 17:41 gpssh.conf
drwx------ 2 gpadmin gpadmin 6 Apr 17 17:41 pg_changetracking
drwx------ 2 gpadmin gpadmin 18 Apr 17 17:41 pg_clog
drwx------ 2 gpadmin gpadmin 18 Apr 17 17:41 pg_distributedlog
drwx------ 2 gpadmin gpadmin 6 Apr 17 17:41 pg_distributedxidmap
-rw-rw-r-- 1 gpadmin gpadmin 4307 Apr 17 17:41 pg_hba.conf
-rw------- 1 gpadmin gpadmin 1636 Apr 17 17:41 pg_ident.conf
drwx------ 2 gpadmin gpadmin 141 Apr 17 17:42 pg_log
drwx------ 4 gpadmin gpadmin 36 Apr 17 17:41 pg_multixact
drwx------ 2 gpadmin gpadmin 25 Apr 17 17:42 pg_stat_tmp
drwx------ 2 gpadmin gpadmin 18 Apr 17 17:41 pg_subtrans
drwx------ 2 gpadmin gpadmin 6 Apr 17 17:41 pg_tblspc
drwx------ 2 gpadmin gpadmin 6 Apr 17 17:41 pg_twophase
drwx------ 2 gpadmin gpadmin 6 Apr 17 17:42 pg_utilitymodedtmredo
-rw------- 1 gpadmin gpadmin 4 Apr 17 17:41 PG_VERSION
drwx------ 3 gpadmin gpadmin 60 Apr 17 17:42 pg_xlog
-rw------- 1 gpadmin gpadmin 21250 Apr 17 17:41 postgresql.conf
-rw------- 1 gpadmin gpadmin 205 Apr 17 17:42 postmaster.opts
-rw------- 1 gpadmin gpadmin 53 Apr 17 17:42 postmaster.pid

Looks familiar: the layout is essentially the same as a PostgreSQL data directory.

$ vi pg_hba.conf
$ psql -d postgres -c "select pg_reload_conf();"
$ psql -d postgres
psql (8.3.23)
Type "help" for help.

postgres=#
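
For remote client access, an entry along these lines can be appended to pg_hba.conf before reloading; the subnet and auth method here are examples only:

host    all    gpadmin    192.168.56.0/24    md5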

One final note

$ gpinitsystem -c /usr/local/greenplum-db/gpinitsystem_config -h /usr/local/greenplum-db/all_host

The -h argument here should have been /usr/local/greenplum-db/all_segment; the wrong file was copied in and I only noticed afterwards. I will redo it when I have time to get more practice. A reminder to myself: be careful, be careful, be careful.
