Hello, development team. I am testing multipath on RHEL 6.2 (x86_64) and hit an issue during a takeover test. There are 8 disks on one target path; after takeover, 7 disks come back active, but 1 stays faulty (mpathf). I checked the target and it looks fine, so I don't know how to avoid this issue. The only difference I can find between the active and faulty maps is in /var/log/messages: for mpathf, multipathd prints "map in use, unable to flush devmap". Is there anything that could help?
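As the dmsetup output below shows, mpathf still has Open count: 1 when multipathd tries to flush it, so something must still hold the map. A minimal sketch of what I plan to check for the holder (the map name mpathf is from my setup; that LVM or kpartx sits on top of it is only a guess):

  # open count > 0 means something still holds the map open
  dmsetup info -c mpathf
  # show what is stacked on top of mpathf (LVM LVs, kpartx partitions, ...)
  dmsetup ls --tree
  # retry the flush once nothing holds the map any more
  multipath -f mpathf

The full output from the failing run follows.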
[root@localhost ~]# multipath -ll
mpathe (23431373633313138) dm-8 _INSPUR_,vg5_lv52
size=300G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=1 status=active
`- 19:0:0:6 sdq 65:0 active ready running
mpathd (23735393139393831) dm-10 _INSPUR_,vg5_lv51
size=350G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=1 status=active
`- 19:0:0:2 sdm 8:192 active ready running
mpathc (26238643332373337) dm-9 _INSPUR_,vg4_lv41
size=300G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=1 status=active
`- 19:0:0:1 sdl 8:176 active ready running
mpathj (23238623936303261) dm-4 _INSPUR_,vg5_lv54
size=350G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=1 status=active
`- 19:0:0:8 sds 65:32 active ready running
mpathi (23966393631376439) dm-7 _INSPUR_,vg4_lv43
size=300G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=1 status=active
`- 19:0:0:4 sdo 8:224 active ready running
mpathh (26535666264653963) dm-3 _INSPUR_,vg4_lv44
size=300G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=1 status=active
`- 19:0:0:5 sdp 8:240 active ready running
mpathg (23532643461393666) dm-6 _INSPUR_,vg5_lv53
size=350G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=1 status=active
`- 19:0:0:7 sdr 65:16 active ready running
mpathf (23863333438666165) dm-5 ,
size=300G features='1 queue_if_no_path' hwhandler='0' wp=rw
`-+- policy='round-robin 0' prio=0 status=enabled
|- #:#:#:# - #:# failed faulty running
`- 19:0:0:3 sdn 8:208 failed faulty running
[root@localhost ~]# dmsetup -v status
Name: mpathe
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 7
Major, minor: 253, 8
Number of targets: 1
UUID: mpath-23431373633313138
0 629145600 multipath 2 0 0 0 1 1 A 0 1 0 65:0 A 0
Name: VolGroup-lv_swap
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 0
Major, minor: 253, 1
Number of targets: 1
UUID: LVM-g146OXH4QW6yuzxVPQg6MTowS4n70ZQE2yXeAnBJ0rzl35hQd5ENo9JrFuavLaZ4
0 8192000 linear
Name: mpathd
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 7
Major, minor: 253, 10
Number of targets: 1
UUID: mpath-23735393139393831
0 734003200 multipath 2 0 0 0 1 1 A 0 1 0 8:192 A 0
Name: VolGroup-lv_root
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 0
Major, minor: 253, 0
Number of targets: 1
UUID: LVM-g146OXH4QW6yuzxVPQg6MTowS4n70ZQEJuESdV8wAAi6xtZJsvue0MMdKwv8JrD1
0 104857600 linear
Name: mpathc
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 8
Major, minor: 253, 9
Number of targets: 1
UUID: mpath-26238643332373337
0 629145600 multipath 2 0 0 0 1 1 A 0 1 0 8:176 A 0
Name: mpathj
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 7
Major, minor: 253, 4
Number of targets: 1
UUID: mpath-23238623936303261
0 734003200 multipath 2 0 0 0 1 1 A 0 1 0 65:32 A 0
Name: mpathi
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 8
Major, minor: 253, 7
Number of targets: 1
UUID: mpath-23966393631376439
0 629145600 multipath 2 0 0 0 1 1 A 0 1 0 8:224 A 0
Name: mpathh
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 7
Major, minor: 253, 3
Number of targets: 1
UUID: mpath-26535666264653963
0 629145600 multipath 2 0 0 0 1 1 A 0 1 0 8:240 A 0
Name: VolGroup-lv_home
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 0
Major, minor: 253, 2
Number of targets: 1
UUID: LVM-g146OXH4QW6yuzxVPQg6MTowS4n70ZQEXnAad4uTeBensk60oEaKeheiQTGKySgB
0 1839439872 linear
Name: mpathg
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 0
Event number: 8
Major, minor: 253, 6
Number of targets: 1
UUID: mpath-23532643461393666
0 734003200 multipath 2 0 0 0 1 1 A 0 1 0 65:16 A 0
Name: mpathf
State: ACTIVE
Read Ahead: 256
Tables present: LIVE
Open count: 1
Event number: 9
Major, minor: 253, 5
Number of targets: 1
UUID: mpath-23863333438666165
0 629145600 multipath 2 39 0 0 1 1 E 0 2 0 8:64 F 1 8:208 F 1
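In the dmsetup status output above, mpathf is the only map whose path group is in state 'E' rather than 'A', and both of its paths are marked 'F' (failed). To watch just that map I can dump its status and table directly; a minimal sketch, map name again from my setup:

  # status of only the faulty map: group state E = enabled (not active), path state F = failed
  dmsetup status mpathf
  # the loaded table, showing which major:minor devices back the map
  dmsetup table mpathf

The /var/log/messages lines around the failure are below.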
May 2 21:54:02 localhost kernel: sd 19:0:0:3: [sdn] Unhandled error code
May 2 21:54:02 localhost kernel: sd 19:0:0:3: [sdn] Result: hostbyte=DID_NO_CONNECT driverbyte=DRIVER_OK
May 2 21:54:02 localhost kernel: sd 19:0:0:3: [sdn] CDB: Write(10): 2a 00 0a 21 b2 00 00 00 60 00
May 2 21:54:02 localhost kernel: end_request: I/O error, dev sdn, sector 169980416
May 2 21:54:03 localhost multipathd: sdb: remove path (uevent)
May 2 21:54:03 localhost multipathd: sdc: remove path (uevent)
May 2 21:54:03 localhost kernel: device-mapper: table: 253:5: multipath: error getting device
May 2 21:54:03 localhost kernel: device-mapper: ioctl: error adding target to table
May 2 21:54:03 localhost kernel: device-mapper: table: 253:5: multipath: error getting device
May 2 21:54:03 localhost kernel: device-mapper: ioctl: error adding target to table
May 2 21:54:03 localhost multipathd: mpathc: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 8:176 1]
May 2 21:54:03 localhost multipathd: sdc: path removed from map mpathc
May 2 21:54:03 localhost multipathd: sdd: remove path (uevent)
May 2 21:54:03 localhost multipathd: sdg: remove path (uevent)
May 2 21:54:03 localhost multipathd: mpathh: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 8:240 1]
May 2 21:54:03 localhost multipathd: sdg: path removed from map mpathh
May 2 21:54:03 localhost multipathd: sdf: remove path (uevent)
May 2 21:54:03 localhost multipathd: mpathi: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 8:224 1]
May 2 21:54:03 localhost multipathd: sdf: path removed from map mpathi
May 2 21:54:03 localhost multipathd: sdi: remove path (uevent)
May 2 21:54:03 localhost multipathd: mpathg: load table [0 734003200 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 65:16 1]
May 2 21:54:03 localhost multipathd: sdi: path removed from map mpathg
May 2 21:54:03 localhost multipathd: sdj: remove path (uevent)
May 2 21:54:03 localhost multipathd: mpathj: load table [0 734003200 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 65:32 1]
May 2 21:54:03 localhost multipathd: sdj: path removed from map mpathj
May 2 21:54:03 localhost multipathd: sde: remove path (uevent)
May 2 21:54:03 localhost multipathd: mpathf: failed in domap for removal of path sde
May 2 21:54:03 localhost multipathd: uevent trigger error
May 2 21:54:03 localhost multipathd: sdh: remove path (uevent)
May 2 21:54:03 localhost multipathd: mpathe: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 65:0 1]
May 2 21:54:03 localhost multipathd: sdh: path removed from map mpathe
May 2 21:54:03 localhost multipathd: mpathf: Entering recovery mode: max_retries=300
May 2 21:54:03 localhost multipathd: 8:64: mark as failed
May 2 21:54:03 localhost multipathd: mpathf: Entering recovery mode: max_retries=300
May 2 21:54:03 localhost multipathd: 8:64: mark as failed
May 2 21:55:19 localhost multipathd: reconfigure (SIGHUP)
May 2 21:55:19 localhost multipathd: mpathc: stop event checker thread (140270187837184)
May 2 21:55:19 localhost multipathd: mpathh: stop event checker thread (140270180927232)
May 2 21:55:19 localhost multipathd: mpathf: stop event checker thread (140270180894464)
May 2 21:55:19 localhost multipathd: mpathi: stop event checker thread (140270180861696)
May 2 21:55:19 localhost multipathd: mpathe: stop event checker thread (140270180828928)
May 2 21:55:19 localhost multipathd: mpathg: stop event checker thread (140270180796160)
May 2 21:55:19 localhost multipathd: mpathj: stop event checker thread (140270180763392)
May 2 21:55:19 localhost multipathd: mpathd: stop event checker thread (140270180730624)
May 2 21:55:20 localhost kernel: device-mapper: table: 253:11: multipath: error getting device
May 2 21:55:20 localhost kernel: device-mapper: ioctl: error adding target to table
May 2 21:55:20 localhost kernel: device-mapper: table: 253:11: multipath: error getting device
May 2 21:55:20 localhost kernel: device-mapper: ioctl: error adding target to table
May 2 21:55:20 localhost multipathd: mpatha: ignoring map
May 2 21:55:20 localhost multipathd: mpathc: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 8:176 1]
May 2 21:55:20 localhost multipathd: mpathd: load table [0 734003200 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 8:192 1]
May 2 21:55:20 localhost multipathd: mpathg: load table [0 734003200 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 65:16 1]
May 2 21:55:20 localhost multipathd: mpathe: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 65:0 1]
May 2 21:55:20 localhost multipathd: mpathh: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 8:240 1]
May 2 21:55:20 localhost multipathd: mpathi: load table [0 629145600 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 8:224 1]
May 2 21:55:20 localhost multipathd: mpathj: load table [0 734003200 multipath 1 queue_if_no_path 0 1 1 round-robin 0 1 1 65:32 1]
May 2 21:55:20 localhost multipathd: mpathf: map in use
May 2 21:55:20 localhost multipathd: mpathf: unable to flush devmap
May 2 21:55:20 localhost multipathd: mpathc: event checker started
May 2 21:55:20 localhost multipathd: mpathd: event checker started
May 2 21:55:20 localhost multipathd: mpathg: event checker started
May 2 21:55:20 localhost multipathd: mpathe: event checker started
May 2 21:55:20 localhost multipathd: mpathh: event checker started
May 2 21:55:20 localhost multipathd: mpathi: event checker started
May 2 21:55:20 localhost multipathd: mpathj: event checker started
May 2 21:55:20 localhost multipathd: mpathf: event checker started