Today a colleague told me that a RAC 19c environment had stopped working and asked me to take a look.
This RAC runs on Huawei Cloud ECS instances, with CentOS 7.6 as the operating system. In my experience, when RAC won't start, the cause is usually one of two things: shared storage or the network. Typical storage problems are a dropped disk, a failed disk, or misbehaving multipath software; typical network problems are a broken private-network NIC or broken connectivity between the nodes. (Note: changing the ssh port or the oracle and grid passwords does not affect a running RAC.)
Unfortunately, this environment had problems with both the shared storage and the network. Let's walk through them.
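Before digging in, a minimal first-pass checklist along those two axes might look like this (a sketch; all of these commands reappear later in this post, run as root with GRID_HOME set per your install):

$GRID_HOME/bin/crsctl check crs          # is the clusterware stack up at all?
$GRID_HOME/bin/crsctl stat res -t -init  # which low-level resource is stuck?
ll /dev/asm*                             # do both nodes see the same shared disks?
ping -c 3 oracle-rac1-priv               # basic reachability of the private network
traceroute oracle-rac1-priv              # UDP reachability (ping alone proves little, as we will see)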
Cause 1: a shared disk was dropped
First, check whether the two nodes see the same set of shared disks. Node 2 turned out to be missing one, so I asked the customer to re-attach it.
After that, the shared disks were consistent on both nodes:
[root@oracle-rac2 ~]# ll /dev/asm*
lrwxrwxrwx 1 root root 3 Jul 30 11:09 /dev/asm-diska -> sde
lrwxrwxrwx 1 root root 3 Jul 30 11:09 /dev/asm-diskb -> sdd
lrwxrwxrwx 1 root root 3 Jul 30 11:03 /dev/asm-diskc -> sdc
lrwxrwxrwx 1 root root 3 Jul 30 11:03 /dev/asm-diskd -> sdb
lrwxrwxrwx 1 root root 3 Jul 30 10:55 /dev/asm-diske -> sda

[root@oracle-rac1 trace]# ll /dev/asm*
lrwxrwxrwx 1 root root 3 Jul 30 11:10 /dev/asm-diska -> sde
lrwxrwxrwx 1 root root 3 Jul 30 11:03 /dev/asm-diskb -> sdb
lrwxrwxrwx 1 root root 3 Jul 30 10:23 /dev/asm-diskc -> sda
lrwxrwxrwx 1 root root 3 Jul 30 11:10 /dev/asm-diskd -> sdd
lrwxrwxrwx 1 root root 3 Jul 30 11:03 /dev/asm-diske -> sdc

[root@oracle-rac2 ~]# $GRID_HOME/bin/kfod disks=asm st=true ds=true cluster=true
--------------------------------------------------------------------------------
 Disk Size Header Path Disk Group User Group
================================================================================
 1: 81920 MB MEMBER /dev/asm-diska DATA grid asmadmin
 2: 81920 MB MEMBER /dev/asm-diskb OCR grid asmadmin
 3: 81920 MB MEMBER /dev/asm-diskc DATA grid asmadmin
 4: 81920 MB MEMBER /dev/asm-diskd DATA grid asmadmin
 5: 81920 MB MEMBER /dev/asm-diske DATA grid asmadmin
--------------------------------------------------------------------------------
ORACLE_SID ORACLE_HOME HOST_NAME
================================================================================

[root@oracle-rac1 trace]# $GRID_HOME/bin/kfod disks=asm st=true ds=true cluster=true
--------------------------------------------------------------------------------
 Disk Size Header Path Disk Group User Group
================================================================================
 1: 81920 MB MEMBER /dev/asm-diska DATA grid asmadmin
 2: 81920 MB MEMBER /dev/asm-diskb DATA grid asmadmin
 3: 81920 MB MEMBER /dev/asm-diskc DATA grid asmadmin
 4: 81920 MB MEMBER /dev/asm-diskd OCR grid asmadmin
 5: 81920 MB MEMBER /dev/asm-diske DATA grid asmadmin
--------------------------------------------------------------------------------
ORACLE_SID ORACLE_HOME HOST_NAME
================================================================================
Here the device-name order differs between the two nodes, but that does not matter: the disks are bound with udev rules, so RAC is unaffected.
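For reference, that binding is typically a rule like the following in /etc/udev/rules.d/99-oracle-asmdevices.rules (a sketch, one rule per disk; the RESULT WWID is a placeholder you would obtain from scsi_id for each device). Because the symlink is keyed to the WWID rather than to the sdX name, the shuffling above is harmless:

# illustrative rule; RESULT value is a placeholder WWID
KERNEL=="sd*", SUBSYSTEM=="block", PROGRAM=="/usr/lib/udev/scsi_id -g -u -d /dev/$name", RESULT=="3600xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", SYMLINK+="asm-diska", OWNER="grid", GROUP="asmadmin", MODE="0660"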
Once the RAC nodes are up again, you can see the following:
-- Node 1
SYS@orcl1> set line 9999
SYS@orcl1> set pagesize 9999
SYS@orcl1> col path format a60
SYS@orcl1> SELECT a.group_number, disk_number,mount_status, a.name, path FROM v$asm_disk a order by a.disk_number;
select instance_name,status from v$instance;

GROUP_NUMBER DISK_NUMBER MOUNT_STATUS NAME PATH
------------ ----------- -------------- ------------- ---------------
 1 0 CACHED DATA_0000 /dev/asm-diskc
 2 0 CACHED OCR_0000 /dev/asm-diskd
 1 1 CACHED DATA_0001 /dev/asm-diske
 1 2 CACHED DATA_0002 /dev/asm-diska
 1 3 CACHED DATA_0003 /dev/asm-diskb


-- Node 2
SQL> set line 9999
SQL> set pagesize 9999
SQL> col path format a60
SQL> SELECT a.group_number, disk_number,mount_status, a.name, path FROM v$asm_disk a order by a.disk_number;
select instance_name,status from v$instance;

GROUP_NUMBER DISK_NUMBER MOUNT_S NAME PATH
------------ ----------- ------- ---------- ---------------
 2 0 CACHED OCR_0000 /dev/asm-diskb
 1 0 CACHED DATA_0000 /dev/asm-diske
 1 1 CACHED DATA_0001 /dev/asm-diskc
 1 2 CACHED DATA_0002 /dev/asm-diska
 1 3 CACHED DATA_0003 /dev/asm-diskd
Cause 2: the security group blocked the traffic
Logging in to the ECS instances, I found that only node 1 was running cluster services; node 2 was not.
[root@oracle-rac1 ~]# crsctl stat res -t
--------------------------------------------------------------------------------
Name Target State Server State details
--------------------------------------------------------------------------------
Local Resources
--------------------------------------------------------------------------------
ora.LISTENER.lsnr
 ONLINE ONLINE oracle-rac1 STABLE
ora.chad
 ONLINE ONLINE oracle-rac1 STABLE
ora.net1.network
 ONLINE ONLINE oracle-rac1 STABLE
ora.ons
 ONLINE ONLINE oracle-rac1 STABLE
--------------------------------------------------------------------------------
Cluster Resources
--------------------------------------------------------------------------------
ora.ASMNET1LSNR_ASM.lsnr(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE OFFLINE STABLE
 3 ONLINE OFFLINE STABLE
ora.DATA.dg(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE OFFLINE STABLE
 3 OFFLINE OFFLINE STABLE
ora.LISTENER_SCAN1.lsnr
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.OCR.dg(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE OFFLINE STABLE
 3 OFFLINE OFFLINE STABLE
ora.asm(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 Started,STABLE
 2 ONLINE OFFLINE STABLE
 3 OFFLINE OFFLINE STABLE
ora.asmnet1.asmnetwork(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE OFFLINE STABLE
 3 OFFLINE OFFLINE STABLE
ora.cvu
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.oracle-rac1.vip
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.oracle-rac2.vip
 1 ONLINE INTERMEDIATE oracle-rac1 FAILED OVER,STABLE
ora.orcl.db
 1 ONLINE ONLINE oracle-rac1 Open,HOME=/u01/app/o
 racle/product/19.3.0
 /dbhome_1,STABLE
 2 ONLINE OFFLINE Instance Shutdown,ST
 ABLE
ora.qosmserver
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.scan1.vip
 1 ONLINE ONLINE oracle-rac1 STABLE
--------------------------------------------------------------------------------
After starting node 2's cluster stack with crsctl start has, I watched the startup with crsctl stat res -t -init:
[root@oracle-rac2 ~]# crsctl start has
CRS-4123: Oracle High Availability Services has been started.
[root@oracle-rac2 ~]# crsctl stat res -t -init
--------------------------------------------------------------------------------
Name Target State Server State details
--------------------------------------------------------------------------------
Cluster Resources
--------------------------------------------------------------------------------
ora.asm
 1 ONLINE OFFLINE STABLE
ora.cluster_interconnect.haip
 1 ONLINE OFFLINE STABLE
ora.crf
 1 ONLINE ONLINE oracle-rac2 STABLE
ora.crsd
 1 ONLINE OFFLINE STABLE
ora.cssd
 1 ONLINE OFFLINE oracle-rac2 STARTING
ora.cssdmonitor
 1 ONLINE ONLINE oracle-rac2 STABLE
ora.ctssd
 1 ONLINE OFFLINE STABLE
ora.diskmon
 1 OFFLINE OFFLINE STABLE
ora.evmd
 1 ONLINE INTERMEDIATE oracle-rac2 STABLE
ora.gipcd
 1 ONLINE ONLINE oracle-rac2 STABLE
ora.gpnpd
 1 ONLINE ONLINE oracle-rac2 STABLE
ora.mdnsd
 1 ONLINE ONLINE oracle-rac2 STABLE
ora.storage
 1 ONLINE OFFLINE STABLE
--------------------------------------------------------------------------------
Repeated runs of crsctl stat res -t -init showed the startup stuck at the ora.cssd resource, so I checked its log:
[root@oracle-rac2 ~]# cd /u01/app/grid/diag/crs/oracle-rac2/crs/trace/
[root@oracle-rac2 trace]# tailf ocssd.trc
...
2021-07-30 09:49:44.851 : CSSD:1881474816: clsssc_CLSFAInit_CB: System not ready for CLSFA initialization
2021-07-30 09:49:45.579 : CSSD:1530889984: [ INFO] clssscWaitOnEventValue: after CmInfo State val 3, eval 1 waited 1000 with cvtimewait status 4294967186
2021-07-30 09:49:45.835 : CSSD:1493997312: [ INFO] clssnmvDHBValidateNCopy: node 1, oracle-rac1, has a disk HB, but no network HB, DHB has rcfg 523049052, wrtcnt, 1472163, LATS 2100744, lastSeqNo 1472162, uniqueness 1627554226, timestamp 1627609785/55291034
2021-07-30 09:49:45.852 : CSSD:1881474816: clsssc_CLSFAInit_CB: System not ready for CLSFA initialization
2021-07-30 09:49:45.874 : CSSD:1486112512: [ INFO] clssscSelect: gipcwait returned with status gipcretTimeout (16)
2021-07-30 09:49:46.579 : CSSD:1530889984: [ INFO] clssscWaitOnEventValue: after CmInfo State val 3, eval 1 waited 1000 with cvtimewait status 4294967186
2021-07-30 09:49:46.837 : CSSD:1493997312: [ INFO] clssnmvDHBValidateNCopy: node 1, oracle-rac1, has a disk HB, but no network HB, DHB has rcfg 523049052, wrtcnt, 1472164, LATS 2101744, lastSeqNo 1472163, uniqueness 1627554226, timestamp 1627609786/55292034
2021-07-30 09:49:46.850 : CSSD:1487689472: [ INFO] clssnmRcfgMgrThread: Local Join
2021-07-30 09:49:46.850 : CSSD:1487689472: [ INFO] clssnmLocalJoinEvent: begin on node(2), waittime 193000
2021-07-30 09:49:46.850 : CSSD:1487689472: [ INFO] clssnmLocalJoinEvent: set curtime (2101764) for my node
2021-07-30 09:49:46.850 : CSSD:1487689472: [ INFO] clssnmLocalJoinEvent: scanning 32 nodes
2021-07-30 09:49:46.850 : CSSD:1487689472: [ INFO] clssnmLocalJoinEvent: Node oracle-rac1, number 1, is in an existing cluster with disk state 3
2021-07-30 09:49:46.850 : CSSD:1487689472: [ WARNING] clssnmLocalJoinEvent: takeover aborted due to cluster member node found on disk
...
The log repeats the lines above over and over. The key phrase is "has a disk HB, but no network HB": the node sees node 1's disk heartbeat but no network heartbeat, which pointed to a private-network (network heartbeat) problem. The alert log and the ohasd trace held nothing useful:
[root@oracle-rac2 trace]# tailf alert.log
2021-07-30 09:47:56.758 [GIPCD(13240)]CRS-8500: Oracle Clusterware GIPCD process is starting with operating system process ID 13240
2021-07-30 09:48:01.127 [OSYSMOND(13348)]CRS-8500: Oracle Clusterware OSYSMOND process is starting with operating system process ID 13348
2021-07-30 09:48:01.234 [CSSDMONITOR(13346)]CRS-8500: Oracle Clusterware CSSDMONITOR process is starting with operating system process ID 13346
2021-07-30 09:48:02.268 [CSSDAGENT(13407)]CRS-8500: Oracle Clusterware CSSDAGENT process is starting with operating system process ID 13407
2021-07-30 09:48:03.024 [OCSSD(13467)]CRS-8500: Oracle Clusterware OCSSD process is starting with operating system process ID 13467
2021-07-30 09:48:04.135 [OCSSD(13467)]CRS-1713: CSSD daemon is started in hub mode
2021-07-30 09:48:06.416 [OCSSD(13467)]CRS-1707: Lease acquisition for node oracle-rac2 number 2 completed
2021-07-30 09:48:07.528 [OCSSD(13467)]CRS-1621: The IPMI configuration data for this node stored in the Oracle registry is incomplete; details at (:CSSNK00002:) in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ocssd.trc
2021-07-30 09:48:07.528 [OCSSD(13467)]CRS-1617: The information required to do node kill for node oracle-rac2 is incomplete; details at (:CSSNM00004:) in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ocssd.trc
2021-07-30 09:48:07.534 [OCSSD(13467)]CRS-1605: CSSD voting file is online: /dev/asm-diskb; details in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ocssd.trc.
2021-07-30 09:58:02.736 [CSSDAGENT(13407)]CRS-5818: Aborted command 'start' for resource 'ora.cssd'. Details at (:CRSAGF00113:) {0:5:3} in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ohasd_cssdagent_root.trc.
2021-07-30 09:58:02.737 [OCSSD(13467)]CRS-1609: This node is unable to communicate with other nodes in the cluster and is going down to preserve cluster integrity; details at (:CSSNM00086:) in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ocssd.trc.
2021-07-30 09:58:02.785 [OHASD(12890)]CRS-2757: Command 'Start' timed out waiting for response from the resource 'ora.cssd'. Details at (:CRSPE00221:) {0:5:3} in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ohasd.trc.
2021-07-30 09:58:03.738 [OCSSD(13467)]CRS-1656: The CSS daemon is terminating due to a fatal error; Details at (:CSSSC00012:) in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ocssd.trc
2021-07-30 09:58:03.738 [OCSSD(13467)]CRS-1603: CSSD on node oracle-rac2 has been shut down.
2021-07-30 09:58:03.840 [OCSSD(13467)]CRS-1609: This node is unable to communicate with other nodes in the cluster and is going down to preserve cluster integrity; details at (:CSSNM00086:) in /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ocssd.trc.
2021-07-30T09:58:08.746077+08:00
Errors in file /u01/app/grid/diag/crs/oracle-rac2/crs/trace/ocssd.trc (incident=353):
CRS-8503 [] [] [] [] [] [] [] [] [] [] [] []
Incident details in: /u01/app/grid/diag/crs/oracle-rac2/crs/incident/incdir_353/ocssd_i353.trc

2021-07-30 09:58:08.739 [OCSSD(13467)]CRS-8503: Oracle Clusterware process OCSSD with operating system process ID 13467 experienced fatal signal or exception code 6.
2021-07-30 09:58:09.829 [CSSDMONITOR(20631)]CRS-8500: Oracle Clusterware CSSDMONITOR process is starting with operating system process ID 20631

[root@oracle-rac2 trace]# tailf ohasd.trc
2021-07-30 09:50:51.551 : CRSPE:1226725120: [ INFO] {0:0:110} Processing PE command id=133 origin:oracle-rac2. Description: [Stat Resource : 0x7f2cec183100]
2021-07-30 09:50:51.551 : CRSPE:1226725120: [ INFO] {0:0:110} Expression Filter : ((LAST_SERVER == oracle-rac2) AND (NAME == ora.cssd))
2021-07-30 09:50:51.562 :UiServer:1220421376: [ INFO] {0:0:110} Done for ctx=0x7f2cf803a9e0
2021-07-30 09:51:02.159 :GIPCHTHR:1243535104: gipchaWorkerWork: workerThread heart beat, time interval since last heartBeat 30080 loopCount 38 sendCount 0 recvCount 102 postCount 0 sendCmplCount 0 recvCmplCount 0
2021-07-30 09:51:04.819 :GIPCHTHR:1216218880: gipchaDaemonWork: DaemonThread heart beat, time interval since last heartBeat 30860loopCount 28
2021-07-30 09:51:13.992 :UiServer:1220421376: [ INFO] {0:0:111} Sending to PE. ctx= 0x7f2cf803d490, ClientPID=13128 set Properties (grid,101398)
2021-07-30 09:51:13.992 : CRSPE:1226725120: [ INFO] {0:0:111} Processing PE command id=134 origin:oracle-rac2. Description: [Stat Resource : 0x7f2cec183100]
2021-07-30 09:51:14.002 :UiServer:1220421376: [ INFO] {0:0:111} Done for ctx=0x7f2cf803d490
2021-07-30 09:51:32.232 :GIPCHTHR:1243535104: gipchaWorkerWork: workerThread heart beat, time interval since last heartBeat 30070 loopCount 31 sendCount 0 recvCount 36 postCount 0 sendCmplCount 0 recvCmplCount 0
2021-07-30 09:51:35.322 :GIPCHTHR:1216218880: gipchaDaemonWork: DaemonThread heart beat, time interval since last heartBeat 30500loopCount 28
2021-07-30 09:51:49.722 : CRSPE:1226725120: [ INFO] {0:0:2} waiting for message 'RESOURCE_START[ora.cssd 1 1] ID 4098:339, tint:{0:5:3}' to be completed on server : oracle-rac2
2021-07-30 09:51:51.431 :UiServer:1220421376: [ INFO] {0:0:112} Sending to PE. ctx= 0x7f2cf803a810, ClientPID=12992 set Properties (root,102335), orig.tint: {0:0:2}
2021-07-30 09:51:51.431 : CRSPE:1226725120: [ INFO] {0:0:112} Processing PE command id=135 origin:oracle-rac2. Description: [Stat Resource : 0x7f2cec183100]
2021-07-30 09:51:51.438 :UiServer:1220421376: [ INFO] {0:0:112} Done for ctx=0x7f2cf803a810
2021-07-30 09:51:51.487 :UiServer:1220421376: [ INFO] {0:0:113} Sending to PE. ctx= 0x7f2cf803da20, ClientPID=12992 set Properties (root,103205), orig.tint: {0:0:2}
2021-07-30 09:51:51.487 : CRSPE:1226725120: [ INFO] {0:0:113} Processing PE command id=136 origin:oracle-rac2. Description: [Stat Resource : 0x7f2cec183100]
2021-07-30 09:51:51.494 :UiServer:1220421376: [ INFO] {0:0:113} Done for ctx=0x7f2cf803da20
2021-07-30 09:51:51.539 :UiServer:1220421376: [ INFO] {0:0:114} Sending to PE. ctx= 0x7f2cf80012a0, ClientPID=12992 set Properties (root,103573), orig.tint: {0:0:2}
2021-07-30 09:51:51.539 : CRSPE:1226725120: [ INFO] {0:0:114} Processing PE command id=137 origin:oracle-rac2. Description: [Stat Resource : 0x7f2cec183100]
2021-07-30 09:51:51.539 : CRSPE:1226725120: [ INFO] {0:0:114} Expression Filter : ((LAST_SERVER == oracle-rac2) AND (NAME == ora.cssd))
2021-07-30 09:51:51.545 :UiServer:1220421376: [ INFO] {0:0:114} Done for ctx=0x7f2cf80012a0
Node 1's logs were equally unhelpful.
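Since the symptom points at the network heartbeat, it is also worth confirming which interfaces the cluster thinks it should use; oifcfg reports that (illustrative output: the interface names are assumptions, the subnets match this environment):

[root@oracle-rac2 ~]# $GRID_HOME/bin/oifcfg getif
eth0  172.18.0.0  global  public
eth1  172.18.1.0  global  cluster_interconnect,asm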
ping worked fine against both the public and the private addresses:
[root@oracle-rac2 ~]# ping oracle-rac1
PING oracle-rac1 (172.18.0.66) 56(84) bytes of data.
64 bytes from oracle-rac1 (172.18.0.66): icmp_seq=1 ttl=64 time=0.250 ms
^C
--- oracle-rac1 ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 0.250/0.250/0.250/0.000 ms
[root@oracle-rac2 ~]# ping oracle-rac1-priv
PING oracle-rac1-priv (172.18.1.66) 56(84) bytes of data.
64 bytes from oracle-rac1-priv (172.18.1.66): icmp_seq=1 ttl=64 time=0.360 ms
^C
--- oracle-rac1-priv ping statistics ---
1 packets transmitted, 1 received, 0% packet loss, time 0ms
rtt min/avg/max/mdev = 0.360/0.360/0.360/0.000 ms
Next, check with traceroute:
[root@oracle-rac2 ~]# traceroute oracle-rac1
traceroute to oracle-rac1 (172.18.0.66), 30 hops max, 60 byte packets
 1 * * *
 2 * * *
 3 * * *
 4 * * *
 5 * * *
 6 * * *
 7 * * *
 8 * * *
 9 * * *
10 * * *
11 * * *
12 * * *
13 * * *
14 * * *
15 * * *
16 * * *^C
[root@oracle-rac2 ~]# traceroute oracle-rac1-priv
traceroute to oracle-rac1-priv (172.18.1.66), 30 hops max, 60 byte packets
 1 * * *
 2 * * *
 3 * * *
 4 * * *
 5 * * *
 6 *^C
[root@oracle-rac2 ~]# traceroute oracle-rac2
traceroute to oracle-rac2 (127.0.0.1), 30 hops max, 60 byte packets
 1 localhost (127.0.0.1) 0.021 ms 0.003 ms 0.003 ms
traceroute failed, on both the public and the private network, so the network really was broken. The lesson: a successful ping does not mean the network is fine; it only proves that ICMP traffic between the nodes works.
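Linux traceroute sends UDP probes by default, which is exactly why it exposed what ping missed. To narrow down which protocol is being filtered, you can switch probe types (a sketch; -T normally requires root):

traceroute oracle-rac1-priv              # default: UDP probes to high ports
traceroute -I oracle-rac1-priv           # ICMP echo probes (would have succeeded here, like ping)
traceroute -T -p 1521 oracle-rac1-priv   # TCP SYN probes, e.g. against the listener port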
I had never hit this error before, so I turned to MOS and the search engines. A summary of the causes others have reported:

- CSSD not starting up on second Node in a 2 Node Cluster (Doc ID 2519544.1): a security product or a virus is running on the server; the security software must be stopped first.
- HAIP needs to be disabled: already done in this environment; see https://www.xmmup.com/dbbao44oracle-racjiqunzhongdeipleixingjianjie.html
- Switch or NIC problems: I deleted and recreated the NIC (roughly the virtual equivalent of unplugging and replugging it), and also brought it down and up again; neither helped, so the NIC itself was not the problem here.
- Firewall problems: the OS firewall was disabled on this system.
- Broken multicast: this can be checked with Oracle's mcasttest.pl script (usage sketched after this list); see Grid Infrastructure Startup During Patching, Install or Upgrade May Fail Due to Multicasting Requirement (Doc ID 1212703.1). The script errored out when I ran it, so I skipped the check; it did not feel like the cause anyway.
- An abnormal gipcd.bin process on the healthy node: gipcd.bin manages all the private NICs in the cluster, and if it misbehaves on the healthy node 1, node 2's HAIP can hang in STARTING and its cssd never starts. The reported fix:
  1) Kill gipcd.bin on node 1; the clusterware restarts it automatically, so node 1's cluster services need no restart.
  2) Restart node 2's cluster services; if the cluster then recovers, the fault was indeed the gipcd.bin process on the healthy node.
- Oracle bugs: possibly BUG 13334158, BUG 13811209 or BUG 12720728, fixed in the 11.2.0.3.12 GI PSU and later PSUs; see the related notes.

Other approaches

Many things can keep node 2 from starting. If none of the above helps, you can also try:

a) Stop the cluster services on the healthy node 1, restart node 2's cluster services first, then node 1's (in my environment node 1 then refused to start; only one node could be up at a time).
b) Stop all cluster services, unplug the interconnect cable, replug it after 15 minutes, and restart the cluster services (this can be retried repeatedly).
c) Start HAIP manually: crsctl start res ora.cluster_interconnect.haip -init, or disable HAIP.
d) Reboot both servers.

- SSH user equivalence: as noted at the top, once RAC is installed, changing the oracle and grid passwords or moving sshd off the default port 22 does not break RAC, because user equivalence is key-based. I tested it anyway; it was fine:
[oracle@oracle-rac2 ~]$ ssh oracle@oracle-rac1 date
Fri Jul 30 10:32:20 CST 2021
[grid@oracle-rac2 ~]$ ssh grid@oracle-rac1 date
Fri Jul 30 10:33:48 CST 2021

One caveat: if you have moved sshd to a port other than 22, you should also update /etc/services to match, otherwise a bare ssh test like the one above will certainly fail:
[grid@oracle-rac2 ~]$ more /etc/services | grep SSH
ssh             1520/tcp                # The Secure Shell (SSH) Protocol
ssh             1520/udp                # The Secure Shell (SSH) Protocol
- Re-running the root.sh script: the last-resort fix (it can solve a lot of problems), but my one attempt here failed too. The script, for the record:
# Query the ASM disks
$GRID_HOME/bin/kfod disks=asm st=true ds=true cluster=true

# Re-run root.sh as root; config file: $ORACLE_HOME/crs/install/crsconfig_params
crsctl stop crs -f
#kill -9 `ps -ef|grep d.bin| grep -v grep | awk '{print $2}'`
export GRID_HOME=$ORACLE_HOME
$GRID_HOME/crs/install/rootcrs.pl -deconfig -force -verbose
$GRID_HOME/crs/install/rootcrs.pl -deconfig -force -verbose -lastnode -keepdg
#find $ORACLE_HOME/gpnp/* -type f -exec rm -rf {} \;
dd if=/dev/zero of=/dev/rhdiskN bs=1024k count=100   # wipe the disk headers
# Check whether a disk still has content:
#   AIX:   lquerypv -h /dev/rhdisk5
#   Linux: hexdump /dev/sdb, e.g. hexdump -n 1024 -C /dev/sdb1

# Make sure the cluster is completely down (rebooting the OS is safest), check the network,
# and check whether cvuqdisk is installed:
#find /u01 -name cvuqdisk*
#rpm -qa cvuqdisk
#rpm -ivh $GRID_HOME/cv/rpm/cvuqdisk-*.rpm
# Review the config file ($ORACLE_HOME/crs/install/crsconfig_params); on 18c the MGMTDB can be dropped
$GRID_HOME/root.sh   # on error, fix the problem and simply re-run; no need to deconfig again
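As promised in the list above, a sketch of the mcasttest.pl invocation from Doc ID 1212703.1 (the node names are this environment's; eth1 as the private interface is an assumption). It probes the 230.0.1.0 and 224.0.0.251 multicast addresses, which you can see bound on port 42424 in the netstat output further down:

$ perl mcasttest.pl -n oracle-rac1,oracle-rac2 -i eth1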
And with that, the day was gone: 10 am to 8 pm. I was ready to give up, uninstall RAC, and reinstall, but I hated not having found the cause...
Then I took a shower, still turning the problem over in my head, and it finally occurred to me: had someone changed the Huawei Cloud security group? This RAC had been hit by a virus before, so the customer might well have tightened the security group afterwards. I checked it right away and found that, sure enough, the user had closed all the UDP ports!
So I added rules between the two nodes allowing all UDP ports, as shown below:
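(The console screenshot from the original post is not reproduced here; as an assumption of what was configured, with the subnet masks inferred from the addresses seen above, the added rules amounted to:)

Direction   Protocol   Port range   Source
Inbound     UDP        1-65535      172.18.0.0/24   (the peer node's public subnet)
Inbound     UDP        1-65535      172.18.1.0/24   (the peer node's private subnet)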
After that, node 2 could immediately reach node 1. traceroute output:
[root@oracle-rac2 ~]# traceroute oracle-rac2
traceroute to oracle-rac2 (127.0.0.1), 30 hops max, 60 byte packets
 1 localhost (127.0.0.1) 0.017 ms 0.003 ms 0.003 ms
[root@oracle-rac2 ~]# traceroute oracle-rac1
traceroute to oracle-rac1 (172.18.0.66), 30 hops max, 60 byte packets
 1 oracle-rac1 (172.18.0.66) 0.276 ms 0.265 ms 0.251 ms

[root@oracle-rac1 ~]# traceroute oracle-rac2
traceroute to oracle-rac2 (172.18.0.67), 30 hops max, 60 byte packets
 1 oracle-rac2 (172.18.0.67) 0.274 ms 0.271 ms 0.256 ms
[root@oracle-rac1 ~]# traceroute oracle-rac1
traceroute to oracle-rac1 (172.18.0.66), 30 hops max, 60 byte packets
 1 oracle-rac1 (172.18.0.66) 0.014 ms 0.003 ms 0.002 ms
Starting node 2 again, everything came up normally. By then it was around 10 pm:
[root@oracle-rac2 ~]# crsctl start has
CRS-4123: Oracle High Availability Services has been started.
[root@oracle-rac2 ~]# crsctl stat res -t
--------------------------------------------------------------------------------
Name Target State Server State details
--------------------------------------------------------------------------------
Local Resources
--------------------------------------------------------------------------------
ora.LISTENER.lsnr
 ONLINE ONLINE oracle-rac1 STABLE
 ONLINE ONLINE oracle-rac2 STABLE
ora.chad
 ONLINE ONLINE oracle-rac1 STABLE
 ONLINE ONLINE oracle-rac2 STABLE
ora.net1.network
 ONLINE ONLINE oracle-rac1 STABLE
 ONLINE ONLINE oracle-rac2 STABLE
ora.ons
 ONLINE ONLINE oracle-rac1 STABLE
 ONLINE ONLINE oracle-rac2 STABLE
--------------------------------------------------------------------------------
Cluster Resources
--------------------------------------------------------------------------------
ora.ASMNET1LSNR_ASM.lsnr(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE ONLINE oracle-rac2 STABLE
 3 ONLINE OFFLINE STABLE
ora.DATA.dg(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE ONLINE oracle-rac2 STABLE
 3 OFFLINE OFFLINE STABLE
ora.LISTENER_SCAN1.lsnr
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.OCR.dg(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE ONLINE oracle-rac2 STABLE
 3 OFFLINE OFFLINE STABLE
ora.asm(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 Started,STABLE
 2 ONLINE ONLINE oracle-rac2 Started,STABLE
 3 OFFLINE OFFLINE STABLE
ora.asmnet1.asmnetwork(ora.asmgroup)
 1 ONLINE ONLINE oracle-rac1 STABLE
 2 ONLINE ONLINE oracle-rac2 STABLE
 3 OFFLINE OFFLINE STABLE
ora.cvu
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.oracle-rac1.vip
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.oracle-rac2.vip
 1 ONLINE ONLINE oracle-rac2 STABLE
ora.orcl.db
 1 ONLINE ONLINE oracle-rac1 Open,HOME=/u01/app/o
 racle/product/19.3.0
 /dbhome_1,STABLE
 2 ONLINE ONLINE oracle-rac2 Open,HOME=/u01/app/o
 racle/product/19.3.0
 /dbhome_1,STABLE
ora.qosmserver
 1 ONLINE ONLINE oracle-rac1 STABLE
ora.scan1.vip
 1 ONLINE ONLINE oracle-rac1 STABLE
--------------------------------------------------------------------------------
A single security-group setting cost me a whole day!
UDP ports in RAC
In fact, much of RAC's inter-node communication runs over UDP ports (cssd, ohasd, and so on), while some of it runs over TCP (tnslsnr, for example):
[root@oracle-rac2 ~]# netstat -ulp
Active Internet connections (only servers)
Proto Recv-Q Send-Q Local Address Foreign Address State PID/Program name
udp 0 0 localhost:13530 0.0.0.0:* 22668/asm_vktm_+ASM
udp 0 0 oracle-rac2-priv:13530 0.0.0.0:* 22668/asm_vktm_+ASM
udp 0 0 0.0.0.0:mdns 0.0.0.0:* 20912/mdnsd.bin
udp 0 0 0.0.0.0:mdns 0.0.0.0:* 20912/mdnsd.bin
udp 0 0 localhost:13554 0.0.0.0:* 25142/ora_p000_orcl
udp 0 0 oracle-rac2-priv:38137 0.0.0.0:* 23563/ora_ipc0_orcl
udp 0 0 localhost:38153 0.0.0.0:* 23871/ora_ocf0_orcl
udp 0 0 localhost:38162 0.0.0.0:* 25281/ora_q004_orcl
udp 0 0 localhost:46396 0.0.0.0:* 25168/ora_p00b_orcl
udp 0 0 oracle-rac2-priv:38212 0.0.0.0:* 25144/ora_p001_orcl
udp 0 0 oracle-rac2-priv:46452 0.0.0.0:* 23629/ora_lmd1_orcl
udp 0 0 oracle-rac2-priv:54646 0.0.0.0:* 23613/ora_dia0_orcl
udp 0 0 oracle-rac2-priv:62850 0.0.0.0:* 24994/ora_rcbg_orcl
udp 0 0 localhost:21905 0.0.0.0:* 25265/ora_q002_orcl
udp 0 0 oracle-rac2-priv:46485 0.0.0.0:* 22698/asm_dia0_+ASM
udp 0 0 localhost:38304 0.0.0.0:* 23747/ora_tmon_orcl
udp 0 0 oracle-rac2-priv:54708 0.0.0.0:* 23619/ora_lms1_orcl
udp 0 0 localhost:54752 0.0.0.0:* 22840/asm_m000_+ASM
udp 0 0 oracle-rac2-priv:54752 0.0.0.0:* 22840/asm_m000_+ASM
udp 0 0 oracle-rac2-priv:30178 0.0.0.0:* 22719/asm_ckpt_+ASM
udp 0 0 oracle-rac2-priv:54759 0.0.0.0:* 23674/oracle+ASM2_a
udp 0 0 localhost:22026 0.0.0.0:* 22644/asm_clmn_+ASM
udp 0 0 oracle-rac2-priv:22026 0.0.0.0:* 22644/asm_clmn_+ASM
udp 0 0 localhost:62992 0.0.0.0:* 25260/ora_qm02_orcl
udp 0 0 oracle-rac2-priv:62998 0.0.0.0:* 23563/ora_ipc0_orcl
udp 0 0 oracle-rac2-priv:13848 0.0.0.0:* 23631/ora_rms0_orcl
udp 0 0 localhost:38454 0.0.0.0:* 22688/asm_ping_+ASM
udp 0 0 oracle-rac2-priv:38454 0.0.0.0:* 22688/asm_ping_+ASM
udp 0 0 oracle-rac2-priv:13897 0.0.0.0:* 23573/ora_gen0_orcl
udp 0 0 localhost:13934 0.0.0.0:* 23683/ora_mmnl_orcl
udp 0 0 localhost:13967 0.0.0.0:* 23181/asm_o000_+ASM
udp 0 0 oracle-rac2-priv:13967 0.0.0.0:* 23181/asm_o000_+ASM
udp 0 0 oracle-rac2-priv:38570 0.0.0.0:* 25157/ora_p007_orcl
udp 0 0 localhost:38610 0.0.0.0:* 22730/asm_gmon_+ASM
udp 0 0 oracle-rac2-priv:38610 0.0.0.0:* 22730/asm_gmon_+ASM
udp 0 0 oracle-rac2-priv:63232 0.0.0.0:* 23635/ora_lck1_orcl
udp 0 0 oracle-rac2-priv:63314 0.0.0.0:* 23671/ora_mmon_orcl
udp 0 0 oracle-rac2-priv:22358 0.0.0.0:* 25147/ora_p002_orcl
udp 0 0 oracle-rac2-priv:63337 0.0.0.0:* 25281/ora_q004_orcl
udp 0 0 oracle-rac2-priv:46961 0.0.0.0:* 23604/ora_ping_orcl
udp 0 0 localhost:30578 0.0.0.0:* 23563/ora_ipc0_orcl
udp 0 0 oracle-rac2-priv:22404 0.0.0.0:* 23617/ora_lms0_orcl
udp 0 0 oracle-rac2-priv:30599 0.0.0.0:* 22682/asm_diag_+ASM
udp 0 0 localhost:38875 0.0.0.0:* 23617/ora_lms0_orcl
udp 0 0 oracle-rac2-priv:38893 0.0.0.0:* 24111/ora_ppa7_orcl
udp 0 0 localhost:47092 0.0.0.0:* 25179/ora_p00f_orcl
udp 0 0 oracle-rac2-priv:55297 0.0.0.0:* 25281/ora_q004_orcl
udp 0 0 oracle-rac2-priv:38923 0.0.0.0:* 25177/ora_p00e_orcl
udp 0 0 oracle-rac2-priv:30750 0.0.0.0:* 23600/ora_dbrm_orcl
udp 0 0 oracle-rac2-priv:63578 0.0.0.0:* 23023/asm_ppa7_+ASM
udp 0 0 oracle-rac2-priv:22643 0.0.0.0:* 23656/ora_reco_orcl
udp 0 0 oracle-rac2-priv:55450 0.0.0.0:* 23606/ora_svcb_orcl
udp 0 0 oracle-rac2-priv:22684 0.0.0.0:* 25142/ora_p000_orcl
udp 0 0 oracle-rac2-priv:55509 0.0.0.0:* 25170/ora_p00c_orcl
udp 0 0 oracle-rac2-priv:55521 0.0.0.0:* 24577/ora_w003_orcl
udp 0 0 oracle-rac2-priv:30946 0.0.0.0:* 23645/ora_ckpt_orcl
udp 0 0 localhost:55527 0.0.0.0:* 25162/ora_p008_orcl
udp 0 0 localhost:14584 0.0.0.0:* 22713/asm_dbw0_+ASM
udp 0 0 oracle-rac2-priv:14584 0.0.0.0:* 22713/asm_dbw0_+ASM
udp 0 0 oracle-rac2-priv:22779 0.0.0.0:* 23729/ora_rsmn_orcl
udp 0 0 localhost:55554 0.0.0.0:* 23579/ora_gen1_orcl
udp 0 0 oracle-rac2-priv:14621 0.0.0.0:* 25129/ora_aqpc_orcl
udp 0 0 oracle-rac2-priv:63778 0.0.0.0:* 22682/asm_diag_+ASM
udp 0 0 localhost:55658 0.0.0.0:* 23183/oracle+ASM2_o
udp 0 0 oracle-rac2-priv:55658 0.0.0.0:* 23183/oracle+ASM2_o
udp 0 0 localhost:22929 0.0.0.0:* 22646/asm_psp0_+ASM
udp 0 0 oracle-rac2-priv:22929 0.0.0.0:* 22646/asm_psp0_+ASM
udp 0 0 localhost:39353 0.0.0.0:* 22735/asm_mmnl_+ASM
udp 0 0 oracle-rac2-priv:39353 0.0.0.0:* 22735/asm_mmnl_+ASM
udp 0 0 localhost:22975 0.0.0.0:* 25147/ora_p002_orcl
udp 0 0 localhost:14794 0.0.0.0:* 23573/ora_gen0_orcl
udp 0 0 oracle-rac2-priv:22987 0.0.0.0:* 25168/ora_p00b_orcl
udp 0 0 oracle-rac2-priv:55783 0.0.0.0:* 25243/oracleorcl2
udp 0 0 oracle-rac2-priv:47643 0.0.0.0:* 22700/asm_lmon_+ASM
udp 0 0 localhost:55842 0.0.0.0:* 25129/ora_aqpc_orcl
udp 0 0 localhost:47721 0.0.0.0:* 22719/asm_ckpt_+ASM
udp 0 0 oracle-rac2-priv:47721 0.0.0.0:* 22719/asm_ckpt_+ASM
udp 0 0 oracle-rac2-priv:39535 0.0.0.0:* 23771/ora_m000_orcl
udp 0 0 localhost:31355 0.0.0.0:* 23645/ora_ckpt_orcl
udp 0 0 localhost:64166 0.0.0.0:* 23604/ora_ping_orcl
udp 0 0 oracle-rac2-priv:39604 0.0.0.0:* 22733/asm_mmon_+ASM
udp 0 0 oracle-rac2-priv:55994 0.0.0.0:* 24994/ora_rcbg_orcl
udp 0 0 localhost:64206 0.0.0.0:* 25157/ora_p007_orcl
udp 0 0 localhost:15056 0.0.0.0:* 23621/ora_lmd0_orcl
udp 0 0 oracle-rac2-priv:56120 0.0.0.0:* 23613/ora_dia0_orcl
udp 0 0 0.0.0.0:39748 0.0.0.0:* 20532/ohasd.bin
udp 0 0 oracle-rac2-priv:64330 0.0.0.0:* 23656/ora_reco_orcl
udp 0 0 oracle-rac2-priv:39759 0.0.0.0:* 25174/ora_p00d_orcl
udp 0 0 localhost:56173 0.0.0.0:* 24387/ora_tt01_orcl
udp 0 0 oracle-rac2-priv:56174 0.0.0.0:* 22688/asm_ping_+ASM
udp 0 0 oracle-rac2-priv:64446 0.0.0.0:* 22674/asm_mman_+ASM
udp 0 0 oracle-rac2-priv:23524 0.0.0.0:* 25170/ora_p00c_orcl
udp 0 0 oracle-rac2-priv:64486 0.0.0.0:* 23671/ora_mmon_orcl
udp 0 0 localhost:48117 0.0.0.0:* 23629/ora_lmd1_orcl
udp 0 0 localhost:15390 0.0.0.0:* 24988/ora_gtx0_orcl
udp 0 0 localhost:15442 0.0.0.0:* 23877/oracle+ASM2_o
udp 0 0 oracle-rac2-priv:15442 0.0.0.0:* 23877/oracle+ASM2_o
udp 0 0 localhost:23737 0.0.0.0:* 23226/oracle+ASM2
udp 0 0 oracle-rac2-priv:23737 0.0.0.0:* 23226/oracle+ASM2
udp 0 0 oracle-rac2-priv:64705 0.0.0.0:* 22756/asm_scm0_+ASM
udp 0 0 localhost:40137 0.0.0.0:* 23635/ora_lck1_orcl
udp 0 0 localhost:64714 0.0.0.0:* 22783/asm_lck0_+ASM
udp 0 0 oracle-rac2-priv:64714 0.0.0.0:* 22783/asm_lck0_+ASM
udp 0 0 localhost:56541 0.0.0.0:* 23869/ora_mark_orcl
udp 0 0 localhost:40160 0.0.0.0:* 25151/ora_p004_orcl
udp 0 0 localhost:15585 0.0.0.0:* 23555/ora_pmon_orcl
udp 0 0 localhost:40161 0.0.0.0:* 22723/asm_lreg_+ASM
udp 0 0 oracle-rac2-priv:40161 0.0.0.0:* 22723/asm_lreg_+ASM
udp 0 0 localhost:56641 0.0.0.0:* 25794/ora_m002_orcl
udp 0 0 localhost:15693 0.0.0.0:* 23633/ora_lmhb_orcl
udp 0 0 localhost:32105 0.0.0.0:* 25153/ora_p005_orcl
udp 0 0 oracle-rac2-priv:23948 0.0.0.0:* 20532/ohasd.bin
udp 0 0 localhost:48536 0.0.0.0:* 23667/ora_asmb_orcl
udp 0 0 localhost:23963 0.0.0.0:* 23722/ora_lck0_orcl
udp 0 0 oracle-rac2-priv:48541 0.0.0.0:* 25179/ora_p00f_orcl
udp 0 0 oracle-rac2-priv:15831 0.0.0.0:* 24577/ora_w003_orcl
udp 0 0 oracle-rac2-priv:40447 0.0.0.0:* 25281/ora_q004_orcl
udp 0 0 oracle-rac2-priv:56834 0.0.0.0:* 25144/ora_p001_orcl
udp 0 0 oracle-rac2-priv:15902 0.0.0.0:* 25166/ora_p00a_orcl
udp 0 0 oracle-rac2-priv:65055 0.0.0.0:* 21835/octssd.bin
udp 0 0 localhost:48692 0.0.0.0:* 25174/ora_p00d_orcl
udp 0 0 localhost:32344 0.0.0.0:* 22696/asm_pman_+ASM
udp 0 0 oracle-rac2-priv:32344 0.0.0.0:* 22696/asm_pman_+ASM
udp 0 0 oracle-rac2-priv:56922 0.0.0.0:* 23875/oracle+ASM2_o
udp 0 0 oracle-rac2-priv:48788 0.0.0.0:* 23671/ora_mmon_orcl
udp 0 0 oracle-rac2-priv:56997 0.0.0.0:* 25177/ora_p00e_orcl
udp 0 0 localhost:65213 0.0.0.0:* 23999/ora_fd00_orcl
udp 0 0 oracle-rac2-priv:48859 0.0.0.0:* 22735/asm_mmnl_+ASM
udp 0 0 oracle-rac2-priv:16109 0.0.0.0:* 23183/oracle+ASM2_o
udp 0 0 localhost:16114 0.0.0.0:* 25243/oracleorcl2
udp 0 0 localhost:48900 0.0.0.0:* 23606/ora_svcb_orcl
udp 0 0 localhost:32544 0.0.0.0:* 25642/ora_gcr1_orcl
udp 0 0 oracle-rac2-priv:16165 0.0.0.0:* 22672/asm_gen0_+ASM
udp 0 0 localhost:16197 0.0.0.0:* 23619/ora_lms1_orcl
udp 0 0 localhost:57173 0.0.0.0:* 23690/ora_imr0_orcl
udp 0 0 localhost:8125 0.0.0.0:* 888/agent
udp 0 0 localhost:32736 0.0.0.0:* 23674/oracle+ASM2_a
udp 0 0 oracle-rac2-priv:32736 0.0.0.0:* 23674/oracle+ASM2_a
udp 0 0 oracle-rac2-priv:57322 0.0.0.0:* 23640/ora_lgwr_orcl
udp 0 0 localhost:16408 0.0.0.0:* 24384/ora_tt00_orcl
udp 0 0 oracle-rac2-priv:24612 0.0.0.0:* 25149/ora_p003_orcl
udp 0 0 0.0.0.0:bootpc 0.0.0.0:* 982/dhclient
udp 0 0 0.0.0.0:bootpc 0.0.0.0:* 980/dhclient
udp 0 0 oracle-rac2-priv:24668 0.0.0.0:* 25172/ora_cjq0_orcl
udp 0 0 localhost:41054 0.0.0.0:* 22711/asm_lck1_+ASM
udp 0 0 oracle-rac2-priv:41054 0.0.0.0:* 22711/asm_lck1_+ASM
udp 0 0 oracle-rac2-priv:16492 0.0.0.0:* 23637/ora_dbw0_orcl
udp 0 0 0.0.0.0:sunrpc 0.0.0.0:* 744/rpcbind
udp 0 0 localhost:24775 0.0.0.0:* 25149/ora_p003_orcl
udp 0 0 localhost:57558 0.0.0.0:* 23030/oracle+ASM2_a
udp 0 0 oracle-rac2-priv:57558 0.0.0.0:* 23030/oracle+ASM2_a
udp 0 0 localhost:57587 0.0.0.0:* 23617/ora_lms0_orcl
udp 0 0 localhost:24840 0.0.0.0:* 25627/oracleorcl2
udp 0 0 oracle-rac2-priv:41250 0.0.0.0:* 21136/gipcd.bin
udp 0 0 oracle-rac2-priv:33071 0.0.0.0:* 23584/ora_diag_orcl
udp 0 0 oracle-rac2-priv:33093 0.0.0.0:* 25170/ora_p00c_orcl
udp 0 0 oracle-rac2-priv:49495 0.0.0.0:* 22642/asm_pmon_+ASM
udp 0 0 localhost:25155 0.0.0.0:* 22698/asm_dia0_+ASM
udp 0 0 oracle-rac2-priv:25155 0.0.0.0:* 22698/asm_dia0_+ASM
udp 0 0 oracle-rac2-priv:16973 0.0.0.0:* 22700/asm_lmon_+ASM
udp 0 0 localhost:49779 0.0.0.0:* 23617/ora_lms0_orcl
udp 0 0 localhost:17025 0.0.0.0:* 25172/ora_cjq0_orcl
udp 0 0 localhost:41610 0.0.0.0:* 23688/ora_s000_orcl
udp 0 0 oracle-rac2-priv:25232 0.0.0.0:* 22728/asm_rbal_+ASM
udp 0 0 oracle-rac2-priv:33485 0.0.0.0:* 21881/crsd.bin
udp 0 0 oracle-rac2-priv:49876 0.0.0.0:* 25147/ora_p002_orcl
udp 0 0 oracle-rac2-priv:33505 0.0.0.0:* 22737/asm_imr0_+ASM
udp 0 0 oracle-rac2-priv:17126 0.0.0.0:* 25149/ora_p003_orcl
udp 0 0 oracle-rac2-priv:41743 0.0.0.0:* 22644/asm_clmn_+ASM
udp 0 0 oracle-rac2-priv:17195 0.0.0.0:* 23649/ora_smon_orcl
udp 0 0 oracle-rac2-priv:17228 0.0.0.0:* 23615/ora_lmon_orcl
udp 0 0 oracle-rac2-priv:58238 0.0.0.0:* 22696/asm_pman_+ASM
udp 0 0 0.0.0.0:894 0.0.0.0:* 744/rpcbind
udp 0 0 localhost:33666 0.0.0.0:* 22715/asm_lgwr_+ASM
udp 0 0 oracle-rac2-priv:33666 0.0.0.0:* 22715/asm_lgwr_+ASM
udp 0 0 localhost:17289 0.0.0.0:* 25707/ora_gcr0_orcl
udp 0 0 oracle-rac2-priv:25512 0.0.0.0:* 23729/ora_rsmn_orcl
udp 0 0 oracle-rac2-priv:9141 0.0.0.0:* 25164/ora_p009_orcl
udp 0 0 localhost:41912 0.0.0.0:* 23663/ora_pxmn_orcl
udp 0 0 oracle-rac2-priv:17385 0.0.0.0:* 25157/ora_p007_orcl
udp 0 0 oracle-rac2-priv:58355 0.0.0.0:* 23621/ora_lmd0_orcl
udp 0 0 oracle-rac2-priv:25597 0.0.0.0:* 23629/ora_lmd1_orcl
udp 0 0 oracle-rac2-priv:58369 0.0.0.0:* 25157/ora_p007_orcl
udp 0 0 localhost:42000 0.0.0.0:* 25267/oracleorcl2
udp 0 0 oracle-rac2-priv:25625 0.0.0.0:* 25179/ora_p00f_orcl
udp 0 0 localhost:58403 0.0.0.0:* 22678/asm_gen1_+ASM
udp 0 0 oracle-rac2-priv:58403 0.0.0.0:* 22678/asm_gen1_+ASM
udp 0 0 localhost:17490 0.0.0.0:* 23771/ora_m000_orcl
udp 0 0 oracle-rac2-priv:17526 0.0.0.0:* 24111/ora_ppa7_orcl
udp 0 0 oracle-rac2-priv:25724 0.0.0.0:* 25164/ora_p009_orcl
udp 0 0 oracle-rac2-priv:42117 0.0.0.0:* 25151/ora_p004_orcl
udp 0 0 oracle-rac2-priv:50348 0.0.0.0:* 25153/ora_p005_orcl
udp 0 0 oracle-rac2-priv:58585 0.0.0.0:* 25142/ora_p000_orcl
udp 0 0 oracle-rac2-priv:34027 0.0.0.0:* 25263/ora_qm05_orcl
udp 0 0 oracle-rac2-priv:9486 0.0.0.0:* 23722/ora_lck0_orcl
udp 0 0 localhost:50453 0.0.0.0:* 25155/ora_p006_orcl
udp 0 0 localhost:17706 0.0.0.0:* 23602/ora_vkrm_orcl
udp 0 0 localhost:25899 0.0.0.0:* 23706/ora_scm0_orcl
udp 0 0 oracle-rac2-priv:42297 0.0.0.0:* 23615/ora_lmon_orcl
udp 0 0 oracle-rac2-priv:58746 0.0.0.0:* 23661/ora_lreg_orcl
udp 0 0 localhost:34186 0.0.0.0:* 22702/asm_lmd0_+ASM
udp 0 0 oracle-rac2-priv:34186 0.0.0.0:* 22702/asm_lmd0_+ASM
udp 0 0 localhost:58769 0.0.0.0:* 23619/ora_lms1_orcl
udp 0 0 oracle-rac2-priv:42395 0.0.0.0:* 22730/asm_gmon_+ASM
udp 0 0 oracle-rac2-priv:9637 0.0.0.0:* 22678/asm_gen1_+ASM
udp 0 0 oracle-rac2-priv:42421 0.0.0.0:* 23020/oracle+ASM2_o
udp 0 0 172.18.1.255:42424 0.0.0.0:* 21401/ocssd.bin
udp 0 0 230.0.1.0:42424 0.0.0.0:* 21401/ocssd.bin
udp 0 0 224.0.0.251:42424 0.0.0.0:* 21401/ocssd.bin
udp 0 0 172.18.1.255:42424 0.0.0.0:* 20532/ohasd.bin
udp 0 0 230.0.1.0:42424 0.0.0.0:* 20532/ohasd.bin
udp 0 0 224.0.0.251:42424 0.0.0.0:* 20532/ohasd.bin
udp 0 0 172.18.1.255:42424 0.0.0.0:* 21136/gipcd.bin
udp 0 0 230.0.1.0:42424 0.0.0.0:* 21136/gipcd.bin
udp 0 0 224.0.0.251:42424 0.0.0.0:* 21136/gipcd.bin
udp 0 0 oracle-rac2-priv:9675 0.0.0.0:* 23613/ora_dia0_orcl
udp 0 0 oracle-rac2-priv:42496 0.0.0.0:* 23181/asm_o000_+ASM
udp 0 0 localhost:34349 0.0.0.0:* 24994/ora_rcbg_orcl
udp 0 0 oracle-rac2-priv:18015 0.0.0.0:* 23619/ora_lms1_orcl
udp 0 0 oracle-rac2-priv:59010 0.0.0.0:* 23604/ora_ping_orcl
udp 0 0 oracle-rac2-priv:18077 0.0.0.0:* 23593/ora_ofsd_orcl
udp 0 0 localhost:26303 0.0.0.0:* 23609/ora_acms_orcl
udp 0 0 oracle-rac2-priv:42715 0.0.0.0:* 23593/ora_ofsd_orcl
udp 0 0 oracle-rac2-priv:18143 0.0.0.0:* 25129/ora_aqpc_orcl
udp 0 0 oracle-rac2-priv:18159 0.0.0.0:* 25155/ora_p006_orcl
udp 0 0 localhost:50954 0.0.0.0:* 22756/asm_scm0_+ASM
udp 0 0 oracle-rac2-priv:50954 0.0.0.0:* 22756/asm_scm0_+ASM
udp 0 0 oracle-rac2-priv:26420 0.0.0.0:* 25149/ora_p003_orcl
udp 0 0 localhost:42857 0.0.0.0:* 23661/ora_lreg_orcl
udp 0 0 oracle-rac2-priv:42875 0.0.0.0:* 23771/ora_m000_orcl
udp 0 0 localhost:51076 0.0.0.0:* 23686/ora_d000_orcl
udp 0 0 oracle-rac2-priv:34706 0.0.0.0:* 25174/ora_p00d_orcl
udp 0 0 localhost:59285 0.0.0.0:* 22678/asm_gen1_+ASM
udp 0 0 oracle-rac2-priv:59285 0.0.0.0:* 22678/asm_gen1_+ASM
udp 0 0 oracle-rac2-priv:59321 0.0.0.0:* 22678/asm_gen1_+ASM
udp 0 0 oracle-rac2-priv:51150 0.0.0.0:* 23706/ora_scm0_orcl
udp 0 0 localhost:51160 0.0.0.0:* 23640/ora_lgwr_orcl
udp 0 0 oracle-rac2-priv:34800 0.0.0.0:* 23030/oracle+ASM2_a
udp 0 0 oracle-rac2-priv:18466 0.0.0.0:* 23606/ora_svcb_orcl
udp 0 0 oracle-rac2-priv:34866 0.0.0.0:* 22783/asm_lck0_+ASM
udp 0 0 localhost:26707 0.0.0.0:* 23649/ora_smon_orcl
udp 0 0 localhost:43102 0.0.0.0:* 23637/ora_dbw0_orcl
udp 0 0 oracle-rac2-priv:43131 0.0.0.0:* 23600/ora_dbrm_orcl
udp 0 0 localhost:43141 0.0.0.0:* 24111/ora_ppa7_orcl
udp 0 0 oracle-rac2-priv:59539 0.0.0.0:* 22688/asm_ping_+ASM
udp 0 0 localhost:10435 0.0.0.0:* 22700/asm_lmon_+ASM
udp 0 0 oracle-rac2-priv:10435 0.0.0.0:* 22700/asm_lmon_+ASM
udp 0 0 oracle-rac2-priv:43207 0.0.0.0:* 23643/ora_lg00_orcl
udp 0 0 localhost:26834 0.0.0.0:* 23631/ora_rms0_orcl
udp 0 0 oracle-rac2-priv:51435 0.0.0.0:* 22725/asm_pxmn_+ASM
udp 0 0 oracle-rac:MOS-soap-opt 0.0.0.0:* 23665/ora_rbal_orcl
udp 0 0 localhost:35150 0.0.0.0:* 23579/ora_gen1_orcl
udp 0 0 oracle-rac2-priv:59735 0.0.0.0:* 23722/ora_lck0_orcl
udp 0 0 oracle-rac2-priv:18823 0.0.0.0:* 25164/ora_p009_orcl
udp 0 0 localhost:59827 0.0.0.0:* 25170/ora_p00c_orcl
udp 0 0 oracle-rac2-priv:59838 0.0.0.0:* 22698/asm_dia0_+ASM
udp 0 0 oracle-rac2-priv:43455 0.0.0.0:* 24111/ora_ppa7_orcl
udp 0 0 oracle-rac2-priv:10703 0.0.0.0:* 25166/ora_p00a_orcl
udp 0 0 oracle-rac2-priv:18902 0.0.0.0:* 22702/asm_lmd0_+ASM
udp 0 0 oracle-rac2-priv:35342 0.0.0.0:* 23619/ora_lms1_orcl
udp 0 0 oracle-rac2-priv:35365 0.0.0.0:* 23023/asm_ppa7_+ASM
udp 0 0 oracle-rac2-priv:43580 0.0.0.0:* 24111/ora_ppa7_orcl
udp 0 0 localhost:27274 0.0.0.0:* 22721/asm_smon_+ASM
udp 0 0 oracle-rac2-priv:27274 0.0.0.0:* 22721/asm_smon_+ASM
udp 0 0 oracle-rac2-priv:35486 0.0.0.0:* 25162/ora_p008_orcl
udp 0 0 oracle-rac2-priv:10924 0.0.0.0:* 23661/ora_lreg_orcl
udp 0 0 oracle-rac2-priv:27314 0.0.0.0:* 25263/ora_qm05_orcl
udp 0 0 localhost:60100 0.0.0.0:* 23665/ora_rbal_orcl
udp 0 0 oracle-rac2-priv:27365 0.0.0.0:* 25172/ora_cjq0_orcl
udp 0 0 oracle-rac2-priv:11038 0.0.0.0:* 22715/asm_lgwr_+ASM
udp 0 0 oracle-rac2-priv:35655 0.0.0.0:* 23584/ora_diag_orcl
udp 0 0 oracle-rac2-priv:52111 0.0.0.0:* 23606/ora_svcb_orcl
udp 0 0 oracle-rac2-priv:43936 0.0.0.0:* 25823/ora_m003_orcl
udp 0 0 oracle-rac2-priv:27552 0.0.0.0:* 25784/ora_m001_orcl
udp 0 0 localhost:43942 0.0.0.0:* 23593/ora_ofsd_orcl
udp 0 0 oracle-rac2-priv:11189 0.0.0.0:* 22723/asm_lreg_+ASM
udp 0 0 localhost:43998 0.0.0.0:* 24487/ora_w002_orcl
udp 0 0 localhost:60414 0.0.0.0:* 23569/ora_vktm_orcl
udp 0 0 oracle-rac2-priv:60447 0.0.0.0:* 22721/asm_smon_+ASM
udp 0 0 localhost:44097 0.0.0.0:* 23669/ora_fenc_orcl
udp 0 0 oracle-rac2-priv:19608 0.0.0.0:* 25784/ora_m001_orcl
udp 0 0 oracle-rac2-priv:19629 0.0.0.0:* 23722/ora_lck0_orcl
udp 0 0 oracle-rac2-priv:52418 0.0.0.0:* 25155/ora_p006_orcl
udp 0 0 localhost:44263 0.0.0.0:* 23643/ora_lg00_orcl
udp 0 0 oracle-rac2-priv:44273 0.0.0.0:* 25151/ora_p004_orcl
udp 0 0 oracle-rac2-priv:11559 0.0.0.0:* 22781/asm_gcr0_+ASM
udp 0 0 localhost:27954 0.0.0.0:* 25144/ora_p001_orcl
udp 0 0 localhost:11588 0.0.0.0:* 25823/ora_m003_orcl
udp 0 0 oracle-rac2-priv:60756 0.0.0.0:* 25144/ora_p001_orcl
udp 0 0 localhost:28018 0.0.0.0:* 23875/oracle+ASM2_o
udp 0 0 oracle-rac2-priv:28018 0.0.0.0:* 23875/oracle+ASM2_o
udp 0 0 localhost:28040 0.0.0.0:* 23647/ora_lg01_orcl
udp 0 0 oracle-rac2-priv:36233 0.0.0.0:* 22704/asm_lms0_+ASM
udp 0 0 oracle-rac2-priv:36242 0.0.0.0:* 23615/ora_lmon_orcl
udp 0 0 oracle-rac2-priv:11675 0.0.0.0:* 22646/asm_psp0_+ASM
udp 0 0 localhost:52661 0.0.0.0:* 22704/asm_lms0_+ASM
udp 0 0 oracle-rac2-priv:52661 0.0.0.0:* 22704/asm_lms0_+ASM
udp 0 0 localhost:60857 0.0.0.0:* 22709/asm_lmhb_+ASM
udp 0 0 oracle-rac2-priv:60857 0.0.0.0:* 22709/asm_lmhb_+ASM
udp 0 0 oracle-rac2-priv:19908 0.0.0.0:* 23617/ora_lms0_orcl
udp 0 0 oracle-rac2-priv:44508 0.0.0.0:* 23729/ora_rsmn_orcl
udp 0 0 oracle-rac2-priv:52741 0.0.0.0:* 25263/ora_qm05_orcl
udp 0 0 localhost:19982 0.0.0.0:* 23593/ora_ofsd_orcl
udp 0 0 localhost:44580 0.0.0.0:* 23873/ora_o000_orcl
udp 0 0 oracle-rac2-priv:44609 0.0.0.0:* 25168/ora_p00b_orcl
udp 0 0 oracle-rac2-priv:52808 0.0.0.0:* 23604/ora_ping_orcl
udp 0 0 localhost:20066 0.0.0.0:* 23658/ora_w001_orcl
udp 0 0 oracle-rac2-priv:20068 0.0.0.0:* 25177/ora_p00e_orcl
udp 0 0 oracle-rac2-priv:44653 0.0.0.0:* 21401/ocssd.bin
udp 0 0 oracle-rac2-priv:28321 0.0.0.0:* 25784/ora_m001_orcl
udp 0 0 oracle-rac2-priv:36517 0.0.0.0:* 25142/ora_p000_orcl
udp 0 0 localhost:20146 0.0.0.0:* 22672/asm_gen0_+ASM
udp 0 0 oracle-rac2-priv:20146 0.0.0.0:* 22672/asm_gen0_+ASM
udp 0 0 localhost:11968 0.0.0.0:* 23600/ora_dbrm_orcl
udp 0 0 localhost:11980 0.0.0.0:* 22737/asm_imr0_+ASM
udp 0 0 oracle-rac2-priv:11980 0.0.0.0:* 22737/asm_imr0_+ASM
udp 0 0 oracle-rac2-priv:44841 0.0.0.0:* 23584/ora_diag_orcl
udp 0 0 oracle-rac2-priv:36677 0.0.0.0:* 25179/ora_p00f_orcl
udp 0 0 localhost:28529 0.0.0.0:* 23651/ora_smco_orcl
udp 0 0 oracle-rac2-priv:53121 0.0.0.0:* 25155/ora_p006_orcl
udp 0 0 oracle-rac2-priv:12183 0.0.0.0:* 22711/asm_lck1_+ASM
udp 0 0 oracle-rac2-priv:61337 0.0.0.0:* 25151/ora_p004_orcl
udp 0 0 localhost:20388 0.0.0.0:* 23671/ora_mmon_orcl
udp 0 0 localhost:28587 0.0.0.0:* 23575/ora_mman_orcl
udp 0 0 oracle-rac2-priv:20437 0.0.0.0:* 25174/ora_p00d_orcl
udp 0 0 oracle-rac2-priv:20441 0.0.0.0:* 25243/oracleorcl2
udp 0 0 localhost:61415 0.0.0.0:* 23557/ora_clmn_orcl
udp 0 0 oracle-rac2-priv:12274 0.0.0.0:* 22668/asm_vktm_+ASM
udp 0 0 localhost:61445 0.0.0.0:* 23020/oracle+ASM2_o
udp 0 0 oracle-rac2-priv:61445 0.0.0.0:* 23020/oracle+ASM2_o
udp 0 0 localhost:12303 0.0.0.0:* 24596/ora_w004_orcl
udp 0 0 localhost:12325 0.0.0.0:* 25166/ora_p00a_orcl
udp 0 0 oracle-rac2-priv:20543 0.0.0.0:* 22704/asm_lms0_+ASM
udp 0 0 localhost:53312 0.0.0.0:* 23656/ora_reco_orcl
udp 0 0 oracle-rac2-priv:28741 0.0.0.0:* 22783/asm_lck0_+ASM
udp 0 0 localhost:28748 0.0.0.0:* 22704/asm_lms0_+ASM
udp 0 0 oracle-rac2-priv:28748 0.0.0.0:* 22704/asm_lms0_+ASM
udp 0 0 localhost:28767 0.0.0.0:* 22781/asm_gcr0_+ASM
udp 0 0 oracle-rac2-priv:28767 0.0.0.0:* 22781/asm_gcr0_+ASM
udp 0 0 oracle-rac2-priv:36968 0.0.0.0:* 23226/oracle+ASM2
udp 0 0 oracle-rac2-priv:53365 0.0.0.0:* 25162/ora_p008_orcl
udp 0 0 localhost:37008 0.0.0.0:* 22674/asm_mman_+ASM
udp 0 0 oracle-rac2-priv:37008 0.0.0.0:* 22674/asm_mman_+ASM
udp 0 0 localhost:61739 0.0.0.0:* 23613/ora_dia0_orcl
udp 0 0 oracle-rac2-priv:37178 0.0.0.0:* 25153/ora_p005_orcl
udp 0 0 localhost:12604 0.0.0.0:* 23023/asm_ppa7_+ASM
udp 0 0 oracle-rac2-priv:12604 0.0.0.0:* 23023/asm_ppa7_+ASM
udp 0 0 localhost:20811 0.0.0.0:* 25784/ora_m001_orcl
udp 0 0 oracle-rac2-priv:29025 0.0.0.0:* 23647/ora_lg01_orcl
udp 0 0 oracle-rac2-priv:61800 0.0.0.0:* 25162/ora_p008_orcl
udp 0 0 localhost:61801 0.0.0.0:* 24389/ora_tt02_orcl
udp 0 0 localhost:37252 0.0.0.0:* 23584/ora_diag_orcl
udp 0 0 oracle-rac2-priv:45454 0.0.0.0:* 20910/evmd.bin
udp 0 0 oracle-rac2-priv:12715 0.0.0.0:* 22709/asm_lmhb_+ASM
udp 0 0 localhost:61907 0.0.0.0:* 23559/ora_psp0_orcl
udp 0 0 localhost:29206 0.0.0.0:* 23611/ora_pman_orcl
udp 0 0 oracle-rac2-priv:12849 0.0.0.0:* 25166/ora_p00a_orcl
udp 0 0 localhost:53824 0.0.0.0:* 23729/ora_rsmn_orcl
udp 0 0 oracle-rac2-priv:53852 0.0.0.0:* 25168/ora_p00b_orcl
udp 0 0 oracle-rac2-priv:37473 0.0.0.0:* 25819/oracle+ASM2_m
udp 0 0 localhost:12934 0.0.0.0:* 25819/oracle+ASM2_m
udp 0 0 oracle-rac2-priv:12934 0.0.0.0:* 25819/oracle+ASM2_m
udp 0 0 localhost:53907 0.0.0.0:* 23266/oracle+ASM2
udp 0 0 oracle-rac2-priv:53907 0.0.0.0:* 23266/oracle+ASM2
udp 0 0 oracle-rac2-priv:37588 0.0.0.0:* 23617/ora_lms0_orcl
udp 0 0 oracle-rac2-priv:45797 0.0.0.0:* 22704/asm_lms0_+ASM
udp 0 0 localhost:62186 0.0.0.0:* 24577/ora_w003_orcl
udp 0 0 oracle-rac2-priv:45817 0.0.0.0:* 23025/asm_asmb_+ASM
udp 0 0 oracle-rac2-priv:13080 0.0.0.0:* 23266/oracle+ASM2
udp 0 0 oracle-rac2-priv:54094 0.0.0.0:* 22840/asm_m000_+ASM
udp 0 0 oracle-rac2-priv:13141 0.0.0.0:* 23621/ora_lmd0_orcl
udp 0 0 localhost:21365 0.0.0.0:* 22733/asm_mmon_+ASM
udp 0 0 oracle-rac2-priv:21365 0.0.0.0:* 22733/asm_mmon_+ASM
udp 0 0 oracle-rac2-priv:21393 0.0.0.0:* 25153/ora_p005_orcl
udp 0 0 oracle-rac2-priv:45976 0.0.0.0:* 25627/oracleorcl2
udp 0 0 localhost:21429 0.0.0.0:* 25263/ora_qm05_orcl
udp 0 0 localhost:13239 0.0.0.0:* 22642/asm_pmon_+ASM
udp 0 0 oracle-rac2-priv:13239 0.0.0.0:* 22642/asm_pmon_+ASM
udp 0 0 localhost:13275 0.0.0.0:* 22682/asm_diag_+ASM
udp 0 0 oracle-rac2-priv:13275 0.0.0.0:* 22682/asm_diag_+ASM
udp 0 0 oracle-rac2-priv:21478 0.0.0.0:* 25147/ora_p002_orcl
udp 0 0 localhost:62473 0.0.0.0:* 23619/ora_lms1_orcl
udp 0 0 oracle-rac2-priv:46107 0.0.0.0:* 23877/oracle+ASM2_o
udp 0 0 localhost:46113 0.0.0.0:* 23615/ora_lmon_orcl
udp 0 0 oracle-rac2-priv:37990 0.0.0.0:* 23722/ora_lck0_orcl
udp 0 0 localhost:62591 0.0.0.0:* 23653/ora_w000_orcl
udp 0 0 localhost:62603 0.0.0.0:* 25177/ora_p00e_orcl
udp 0 0 localhost:29836 0.0.0.0:* 23025/asm_asmb_+ASM
udp 0 0 oracle-rac2-priv:29836 0.0.0.0:* 23025/asm_asmb_+ASM
udp 0 0 localhost:21677 0.0.0.0:* 25164/ora_p009_orcl
udp 0 0 oracle-rac2-priv:21677 0.0.0.0:* 23573/ora_gen0_orcl
udp 0 0 oracle-rac2-priv:62639 0.0.0.0:* 23606/ora_svcb_orcl
udp 0 0 localhost:54450 0.0.0.0:* 22725/asm_pxmn_+ASM
udp 0 0 oracle-rac2-priv:54450 0.0.0.0:* 22725/asm_pxmn_+ASM
udp 0 0 oracle-rac2-priv:46268 0.0.0.0:* 23649/ora_smon_orcl
udp 0 0 localhost:13524 0.0.0.0:* 22728/asm_rbal_+ASM
udp 0 0 oracle-rac2-priv:13524 0.0.0.0:* 22728/asm_rbal_+ASM
udp6 0 0 [::]:mdns [::]:* 20912/mdnsd.bin
udp6 0 0 [::]:mdns [::]:* 20912/mdnsd.bin
udp6 0 0 localhost:47775 [::]:* 23688/ora_s000_orcl
udp6 0 0 localhost:64529 [::]:* 23686/ora_d000_orcl
udp6 0 0 localhost:48670 [::]:* 25267/oracleorcl2
udp6 0 0 [::]:sunrpc [::]:* 744/rpcbind
udp6 0 0 localhost:41276 [::]:* 23674/oracle+ASM2_a
udp6 0 0 [::]:894 [::]:* 744/rpcbind
udp6 0 0 localhost:51484 [::]:* 22723/asm_lreg_+ASM
udp6 0 0 localhost:43907 [::]:* 23661/ora_lreg_orcl
udp6 0 0 localhost:53992 [::]:* 23030/oracle+ASM2_a
udp6 0 0 localhost:21720 [::]:* 20532/ohasd.bin
[root@oracle-rac2 ~]# netstat -ulp | wc -l
412

[root@oracle-rac2 ~]# netstat -tlp
Active Internet connections (only servers)
Proto Recv-Q Send-Q Local Address Foreign Address State PID/Program name
tcp 0 0 localhost:8126 0.0.0.0:* LISTEN 891/trace-agent
tcp 0 0 localhost:bootserver 0.0.0.0:* LISTEN 20878/oraagent.bin
tcp 0 0 oracle-rac2-priv:12132 0.0.0.0:* LISTEN 22337/tnslsnr
tcp 0 0 0.0.0.0:44132 0.0.0.0:* LISTEN 21039/gpnpd.bin
tcp 0 0 localhost:commplex-main 0.0.0.0:* LISTEN 888/agent
tcp 0 0 localhost:commplex-link 0.0.0.0:* LISTEN 888/agent
tcp 0 0 oracle-rac2-priv:11402 0.0.0.0:* LISTEN 21258/osysmond.bin
tcp 0 0 localhost:6062 0.0.0.0:* LISTEN 890/process-agent
tcp 0 0 0.0.0.0:sunrpc 0.0.0.0:* LISTEN 744/rpcbind
tcp 0 0 oracle-rac2-vi:ncube-lm 0.0.0.0:* LISTEN 22337/tnslsnr
tcp 0 0 localhost:ncube-lm 0.0.0.0:* LISTEN 22337/tnslsnr
tcp 0 0 localhost:synchronet-db 0.0.0.0:* LISTEN 22473/ons
tcp 0 0 oracle-rac2:prospero-np 0.0.0.0:* LISTEN 22392/tnslsnr
tcp 0 0 localhost:findviatv 0.0.0.0:* LISTEN 1071/xrdp-sesman
tcp 0 0 0.0.0.0:ssh 0.0.0.0:* LISTEN 1931/sshd
tcp 0 0 localhost:smtp 0.0.0.0:* LISTEN 1401/master
tcp6 0 0 [::]:fcp-addr-srvr1 [::]:* LISTEN 22337/tnslsnr
tcp6 0 0 localhost:bootserver [::]:* LISTEN 20878/oraagent.bin
tcp6 0 0 [::]:29102 [::]:* LISTEN 23686/ora_d000_orcl
tcp6 0 0 [::]:sunrpc [::]:* LISTEN 744/rpcbind
tcp6 0 0 localhost:synchronet-db [::]:* LISTEN 22473/ons
tcp6 0 0 [::]:lm-x [::]:* LISTEN 22473/ons
tcp6 0 0 localhost:smtp [::]:* LISTEN 1401/master


[root@oracle-rac2 ~]# netstat -utlp | grep tns
tcp 0 0 oracle-rac2-priv:12132 0.0.0.0:* LISTEN 22337/tnslsnr
tcp 0 0 oracle-rac2-vi:ncube-lm 0.0.0.0:* LISTEN 22337/tnslsnr
tcp 0 0 localhost:ncube-lm 0.0.0.0:* LISTEN 22337/tnslsnr
tcp 0 0 oracle-rac2:prospero-np 0.0.0.0:* LISTEN 22392/tnslsnr
tcp6 0 0 [::]:fcp-addr-srvr1 [::]:* LISTEN 22337/tnslsnr
[root@oracle-rac2 ~]# netstat -utlp | grep pmon
udp 0 0 localhost:15585 0.0.0.0:* 23555/ora_pmon_orcl
udp 0 0 oracle-rac2-priv:49495 0.0.0.0:* 22642/asm_pmon_+ASM
udp 0 0 localhost:13239 0.0.0.0:* 22642/asm_pmon_+ASM
udp 0 0 oracle-rac2-priv:13239 0.0.0.0:* 22642/asm_pmon_+ASM
[root@oracle-rac2 ~]# netstat -utlp | grep ocssd
udp 0 0 172.18.1.255:42424 0.0.0.0:* 21401/ocssd.bin
udp 0 0 230.0.1.0:42424 0.0.0.0:* 21401/ocssd.bin
udp 0 0 224.0.0.251:42424 0.0.0.0:* 21401/ocssd.bin
udp 0 0 oracle-rac2-priv:44653 0.0.0.0:* 21401/ocssd.bin
[root@oracle-rac2 ~]# netstat -utlp | grep ohas
udp 0 0 0.0.0.0:39748 0.0.0.0:* 20532/ohasd.bin
udp 0 0 oracle-rac2-priv:23948 0.0.0.0:* 20532/ohasd.bin
udp 0 0 172.18.1.255:42424 0.0.0.0:* 20532/ohasd.bin
udp 0 0 230.0.1.0:42424 0.0.0.0:* 20532/ohasd.bin
udp 0 0 224.0.0.251:42424 0.0.0.0:* 20532/ohasd.bin
udp6 0 0 localhost:21720 [::]:* 20532/ohasd.bin
So when installing RAC on a cloud platform, the security group must allow all UDP and TCP ports between the two nodes.
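A quick way to verify UDP reachability between the nodes after changing the rules is a netcat probe (a sketch; port 45000 is an arbitrary unused port):

# on node 1: listen on a UDP port
[root@oracle-rac1 ~]# nc -u -l 45000
# on node 2: send a datagram; the text should show up on node 1 if UDP is allowed
[root@oracle-rac2 ~]# echo ping | nc -u oracle-rac1-priv 45000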
For comparison, here is a standalone Oracle environment:
[root@lhrora11203 /]# netstat -tulp
Active Internet connections (only servers)
Proto Recv-Q Send-Q Local Address Foreign Address State PID/Program name
tcp 0 0 *:25072 *:* LISTEN 968/ora_d000_LHR11G
tcp 0 0 *:ncube-lm *:* LISTEN 844/tnslsnr
tcp 0 0 *:ssh *:* LISTEN 790/sshd
tcp 0 0 localhost:ipp *:* LISTEN 746/cupsd
tcp 0 0 *:ssh *:* LISTEN 790/sshd
udp 0 0 localhost:27586 *:* 968/ora_d000_LHR11G
udp 0 0 localhost:65056 *:* 936/ora_pmon_LHR11G
udp 0 0 *:ipp *:* 746/cupsd
udp 0 0 *:42111 *:* 964/ora_mmon_LHR11G
udp 0 0 localhost:48514 *:* 970/ora_s000_LHR11G
It also uses a few UDP ports.