Hostname | IP Addresses | OS Version | Database Version |
orcl1 | Public IP: 192.168.133.104, VIP: 192.168.133.114, Private IP: 10.10.1.14 | RHEL 7.5 (64-bit) | 11.2.0.4.0 |
orcl2 | Public IP: 192.168.133.105, VIP: 192.168.133.115, Private IP: 10.10.1.15 | RHEL 7.5 (64-bit) | 11.2.0.4.0 |
orcl1/orcl2 | SCAN IP: 192.168.133.106 | | |
[grid@orcl1 ~]$ /u01/app/grid/11.2.0/dbhome_1/bin/oifcfg getif
ens33  192.168.133.0  global  public
ens38  10.10.1.0  global  cluster_interconnect
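oifcfg getif only documents the current interface definitions. In this walkthrough the subnets themselves stay the same (only host addresses within 192.168.133.0/24 change), so no setif/delif is required; if the public subnet itself were changing, the stored definition would also have to be replaced, roughly like this (a sketch with a hypothetical new subnet):

# hypothetical: only needed if the public subnet itself changes (it does not here)
oifcfg delif -global ens33
oifcfg setif -global ens33/192.168.134.0:public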
[grid@orcl1 ~]$ srvctl config nodeapps -a
Network exists: 1/192.168.133.0/255.255.255.0/ens33, type static
VIP exists: /orcl1-vip/192.168.133.114/192.168.133.0/255.255.255.0/ens33, hosting node orcl1
VIP exists: /orcl2-vip/192.168.133.115/192.168.133.0/255.255.255.0/ens33, hosting node orcl2
[grid@orcl1 ~]$ crsctl stat res ora.orcl1.vip
NAME=ora.orcl1.vip
TYPE=ora.cluster_vip_net1.type
TARGET=ONLINE
STATE=ONLINE on orcl1

[grid@orcl2 ~]$ crsctl stat res ora.orcl2.vip
NAME=ora.orcl2.vip
TYPE=ora.cluster_vip_net1.type
TARGET=ONLINE
STATE=ONLINE on orcl2
srvctl stop listener -n orcl1
srvctl stop vip -n orcl1 -f
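The original stops the listener and VIP on node 1 only. Since both VIPs change in this scenario, node 2 presumably needs the same stop before its address can be modified; a minimal sketch, assuming the same naming as above (not shown in the original):

srvctl stop listener -n orcl2
srvctl stop vip -n orcl2 -f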
$ srvctl stop instance -d orcl -n orcl1
$ srvctl stop instance -d orcl -n orcl2
cd /u01/app/grid/11.2.0/dbhome_1/gpnp/orcl1/profiles/peer
cp -p profile.xml profile.xml.bak
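The backup preserves the GPnP profile, which stores the network definitions the stack reads at startup. While the stack is still up, the profile in effect can be dumped with gpnptool from the grid home to confirm what it records (a sketch; the output is the raw profile XML):

/u01/app/grid/11.2.0/dbhome_1/bin/gpnptool get
# prints the profile XML, including the stored public and interconnect network definitions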
/u01/app/grid/11.2.0/dbhome_1/bin/crsctl stop crs
/u01/app/grid/11.2.0/dbhome_1/bin/crsctl disable crs
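Both commands must be run as root, on every node. A quick way to confirm the stack is really down on a node before touching the network configuration:

/u01/app/grid/11.2.0/dbhome_1/bin/crsctl check crs
# expect CRS-4639 (cannot contact Oracle High Availability Services) once the stack is stopped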
[root@orcl1 ~]# cat /etc/hosts
127.0.0.1   localhost localhost.localdomain localhost4 localhost4.localdomain4
::1         localhost localhost.localdomain localhost6 localhost6.localdomain6
#public ip
192.168.133.104 orcl1
192.168.133.105 orcl2
#priv ip
10.10.1.14 orcl1-priv
10.10.1.15 orcl2-priv
#vip ip
192.168.133.114 orcl1-vip
192.168.133.115 orcl2-vip
#scan ip
192.168.133.106 orcl-scan
[root@orcl1 ~]# cat /etc/hosts
127.0.0.1   localhost localhost.localdomain localhost4 localhost4.localdomain4
::1         localhost localhost.localdomain localhost6 localhost6.localdomain6
#public ip
192.168.133.154 orcl1
192.168.133.155 orcl2
#priv ip
10.10.1.14 orcl1-priv
10.10.1.15 orcl2-priv
#vip ip
192.168.133.164 orcl1-vip
192.168.133.165 orcl2-vip
#scan ip
192.168.133.166 orcl-scan
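The new public addresses must also be applied at the OS level on each node; the original does not show this step. A minimal sketch for RHEL 7 using nmcli, assuming the public NIC's connection profile is named ens33 (an assumption; check with nmcli connection show):

# run as root on orcl1; repeat on orcl2 with 192.168.133.155/24
nmcli connection modify ens33 ipv4.addresses 192.168.133.154/24
nmcli connection up ens33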
[root@orcl1 ~]# /u01/app/grid/11.2.0/dbhome_1/bin/srvctl modify nodeapps -n orcl1 -A orcl1-vip/255.255.255.0
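The original modifies node 1 only; node 2 presumably needs the equivalent change so its VIP resolves to the new address (an assumption, since the original shows only one node):

/u01/app/grid/11.2.0/dbhome_1/bin/srvctl modify nodeapps -n orcl2 -A orcl2-vip/255.255.255.0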
[grid@orcl1 ~]$ srvctl config nodeapps -a
Network exists: 1/192.168.133.0/255.255.255.0/ens33, type static
VIP exists: /orcl1-vip/192.168.133.164/192.168.133.0/255.255.255.0/ens33, hosting node orcl1
VIP exists: /orcl2-vip/192.168.133.165/192.168.133.0/255.255.255.0/ens33, hosting node orcl2
$ srvctl start vip -n orcl1
$ srvctl start vip -n orcl2
$ srvctl start listener -n orcl1
$ srvctl start listener -n orcl2
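Before starting the instances it is worth confirming the VIPs actually came up on the new addresses; a sketch:

$ srvctl status vip -n orcl1
$ srvctl status vip -n orcl2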
$ srvctl start instance -d orcl -n orcl1
$ srvctl start instance -d orcl -n orcl2
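A quick confirmation that both instances are back:

$ srvctl status database -d orcl
# expect both instances reported as running on their home nodes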
#scan ip
192.168.133.106 orcl-scan
#scan ip
192.168.133.166 orcl-scan
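After editing /etc/hosts, confirm on every node that the SCAN name resolves to the new address:

getent hosts orcl-scan
# expect: 192.168.133.166  orcl-scan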
[grid@orcl1 ~]$ srvctl config scan
SCAN name: orcl-scan, Network: 1/192.168.133.0/255.255.255.0/ens33
SCAN VIP name: scan1, IP: /orcl-scan/192.168.133.106
[grid@orcl1 ~]$ srvctl stop scan_listener |
[grid@orcl1 ~]$ srvctl stop scan |
[root@orcl1 ~]# /u01/app/grid/11.2.0/dbhome_1/bin/srvctl modify scan -n orcl-scan |
[grid@orcl1 ~]$ srvctl config scan
SCAN name: orcl-scan, Network: 1/192.168.133.0/255.255.255.0/ens33
SCAN VIP name: scan1, IP: /orcl-scan/192.168.133.166
[grid@orcl1 ~]$ srvctl start scan
[grid@orcl1 ~]$ srvctl start scan_listener
[grid@orcl1 ~]$ srvctl status scan
SCAN VIP scan1 is enabled
SCAN VIP scan1 is running on node orcl2
[grid@orcl1 ~]$ srvctl status scan_listener
SCAN Listener LISTENER_SCAN1 is enabled
SCAN listener LISTENER_SCAN1 is running on node orcl2
[grid@orcl1 ~]$ crsctl stat res ora.scan1.vip ora.LISTENER_SCAN1.lsnr
NAME=ora.LISTENER_SCAN1.lsnr
TYPE=ora.scan_listener.type
TARGET=ONLINE
STATE=ONLINE on orcl2

NAME=ora.scan1.vip
TYPE=ora.scan_vip.type
TARGET=ONLINE
STATE=ONLINE on orcl2
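As an end-to-end check, a client connection through the new SCAN confirms the whole change; a sketch, assuming the service name is orcl and a working account (both assumptions):

sqlplus system@//orcl-scan:1521/orcl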
/u01/app/grid/11.2.0/dbhome_1/bin/crsctl enable crs
/u01/app/grid/11.2.0/dbhome_1/bin/crsctl start crs
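With CRS re-enabled for autostart, a full resource listing verifies everything is back online with the new addresses:

crsctl stat res -t
# all ora.* resources should show TARGET=ONLINE and STATE=ONLINE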