stuartthebruce opened this issue 3 years ago
What...makes you say it doesn't?
cache/log/spare devices are treated differently, but AIUI special devices should just follow the same rules as any other data vdev.
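In the meantime, one way to check by hand whether a particular host can actually read a pool's special devices is to dump their vdev labels with zdb -l. A minimal sketch, assuming the device path recorded on the active node (the name below is only a placeholder) resolves on the standby host; for whole-disk vdevs the label normally lives on the first partition:
# placeholder path; prints the label (pool name, guid, vdev tree) if the
# device is readable, or fails with "failed to unpack label" if it is not
zdb -l /dev/disk/by-id/nvme-EXAMPLE_SPECIAL_DEVICE-part1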
Here is an example. The host zfs3 is able to see the raidz3 devices from the pools home1 and home3, but there is no indication that it cannot see the special devices that are part of those pools, as seen from the hosts cascade2 and zfs4 that currently have those pools imported. Note, zfs3 is running version 2.1.1 on Rocky Linux 8.4.
[root@zfs3 ~]# zpool import
pool: home1
id: 13906513988257520913
state: UNAVAIL
status: The pool has the multihost property on. It cannot
be safely imported when the system hostid is not set.
action: Set a unique system hostid with the zgenhostid(8) command.
see: https://openzfs.github.io/openzfs-docs/msg/ZFS-8000-EY
config:
home1 UNAVAIL currently in use
raidz3-0 DEGRADED
sdgf ONLINE
sdie ONLINE
sdfs ONLINE
sdfw ONLINE
sdgu ONLINE
sdgn ONLINE
sdhc ONLINE
sdgk ONLINE
nvme-Micron_9300_MTFDHAL12T8TDR_21172EAC0D8A UNAVAIL
sdgb ONLINE
sdga ONLINE
sdgc ONLINE
sdgs ONLINE
sdgg ONLINE
sdhx ONLINE
raidz3-1 ONLINE
sdfq ONLINE
sdfp ONLINE
sdgv ONLINE
sdhf ONLINE
sdhg ONLINE
sdft ONLINE
sdgt ONLINE
sdhe ONLINE
sdfy ONLINE
sdge ONLINE
sdfx ONLINE
sdfv ONLINE
sdhd ONLINE
sdfr ONLINE
sdic ONLINE
raidz3-2 ONLINE
sdid ONLINE
sdib ONLINE
sdgl ONLINE
sdgx ONLINE
sdgr ONLINE
sdgi ONLINE
sdif ONLINE
sdgm ONLINE
sdgd ONLINE
sdhz ONLINE
sdgp ONLINE
sdgh ONLINE
sdhm ONLINE
sdfu ONLINE
sdhh ONLINE
raidz3-3 ONLINE
sdhj ONLINE
sdgy ONLINE
sdgq ONLINE
sdfz ONLINE
sdgj ONLINE
sdia ONLINE
sdha ONLINE
sdhn ONLINE
sdgo ONLINE
sdhi ONLINE
sdhk ONLINE
sdhy ONLINE
sdhl ONLINE
sdhb ONLINE
sdgz ONLINE
pool: home3
id: 12759373173601709758
state: UNAVAIL
status: The pool has the multihost property on. It cannot
be safely imported when the system hostid is not set.
action: Set a unique system hostid with the zgenhostid(8) command.
see: https://openzfs.github.io/openzfs-docs/msg/ZFS-8000-EY
config:
home3 UNAVAIL currently in use
raidz3-0 ONLINE
sdfd ONLINE
sdfe ONLINE
sdw ONLINE
wwn-0x5000cca2531e5f38 ONLINE
sdr ONLINE
wwn-0x5000cca2531e5f6c ONLINE
sdfh ONLINE
wwn-0x5000cca2531e84ec ONLINE
sdfi ONLINE
sdfa ONLINE
sdfg ONLINE
sdff ONLINE
wwn-0x5000cca2531e9f48 ONLINE
sdu ONLINE
sdev ONLINE
raidz3-1 ONLINE
wwn-0x5000cca2530aa110 ONLINE
wwn-0x5000cca2530aa424 ONLINE
wwn-0x5000cca2530aacb4 ONLINE
wwn-0x5000cca2530e297c ONLINE
sdeu ONLINE
sdex ONLINE
sder ONLINE
sdes ONLINE
sdl ONLINE
sdez ONLINE
wwn-0x5000cca25316d614 ONLINE
sdew ONLINE
sdeq ONLINE
sdt ONLINE
sdd ONLINE
raidz3-2 ONLINE
wwn-0x5000cca2530a4188 ONLINE
wwn-0x5000cca2530a461c ONLINE
sdfb ONLINE
sdo ONLINE
wwn-0x5000cca2530a4bc4 ONLINE
wwn-0x5000cca2530a58a4 ONLINE
sdm ONLINE
sdp ONLINE
wwn-0x5000cca2530a6f3c ONLINE
sdv ONLINE
sdx ONLINE
wwn-0x5000cca2530a7288 ONLINE
wwn-0x5000cca2530a7408 ONLINE
sdhp ONLINE
sddr ONLINE
raidz3-3 ONLINE
sdds ONLINE
sdn ONLINE
sdz ONLINE
wwn-0x5000cca253089a64 ONLINE
sdey ONLINE
sdet ONLINE
wwn-0x5000cca253091b9c ONLINE
sde ONLINE
sdq ONLINE
sda ONLINE
wwn-0x5000cca2530a0dd0 ONLINE
sds ONLINE
sdf ONLINE
sdy ONLINE
sdfc ONLINE
[root@cascade2 ~]# zpool status
pool: home1
state: ONLINE
scan: resilvered 9.45G in 00:27:37 with 0 errors on Sat Oct 23 13:17:46 2021
config:
NAME STATE READ WRITE CKSUM
home1 ONLINE 0 0 0
raidz3-0 ONLINE 0 0 0
35000cca253134c28 ONLINE 0 0 0
35000cca253146b40 ONLINE 0 0 0
35000cca253155e44 ONLINE 0 0 0
35000cca25319f9ac ONLINE 0 0 0
35000cca2531a6ba0 ONLINE 0 0 0
35000cca2531b8108 ONLINE 0 0 0
35000cca2531bcadc ONLINE 0 0 0
35000cca2531d41f4 ONLINE 0 0 0
nvme-Micron_9300_MTFDHAL12T8TDR_21172EAC0D8A ONLINE 0 0 0
35000cca2531d4cac ONLINE 0 0 0
35000cca2531da728 ONLINE 0 0 0
35000cca2531da880 ONLINE 0 0 0
35000cca2531dad74 ONLINE 0 0 0
35000cca2531ff2bc ONLINE 0 0 0
35000cca2531ffff8 ONLINE 0 0 0
raidz3-1 ONLINE 0 0 0
35000cca253204e9c ONLINE 0 0 0
35000cca253205ffc ONLINE 0 0 0
35000cca2532067e0 ONLINE 0 0 0
35000cca253207fdc ONLINE 0 0 0
35000cca253207ff4 ONLINE 0 0 0
35000cca2532081b0 ONLINE 0 0 0
35000cca25320d79c ONLINE 0 0 0
35000cca25320dad0 ONLINE 0 0 0
35000cca25320e460 ONLINE 0 0 0
35000cca2532105a8 ONLINE 0 0 0
35000cca253217370 ONLINE 0 0 0
35000cca2532176f4 ONLINE 0 0 0
35000cca2532178d8 ONLINE 0 0 0
35000cca25321b168 ONLINE 0 0 0
35000cca25321b5f8 ONLINE 0 0 0
raidz3-2 ONLINE 0 0 0
35000cca25321b774 ONLINE 0 0 0
35000cca25321c2e0 ONLINE 0 0 0
35000cca25321c61c ONLINE 0 0 0
35000cca25321c804 ONLINE 0 0 0
35000cca25321c870 ONLINE 0 0 0
35000cca25321c898 ONLINE 0 0 0
35000cca25321c910 ONLINE 0 0 0
35000cca25321c938 ONLINE 0 0 0
35000cca25321ca74 ONLINE 0 0 0
35000cca25323980c ONLINE 0 0 0
35000cca253241428 ONLINE 0 0 0
35000cca253241574 ONLINE 0 0 0
35000cca253246560 ONLINE 0 0 0
35000cca2532479a4 ONLINE 0 0 0
35000cca253247c68 ONLINE 0 0 0
raidz3-3 ONLINE 0 0 0
35000cca25324a360 ONLINE 0 0 0
35000cca25324b7c0 ONLINE 0 0 0
35000cca25324d8e8 ONLINE 0 0 0
35000cca25324dc4c ONLINE 0 0 0
35000cca253251828 ONLINE 0 0 0
35000cca253256f0c ONLINE 0 0 0
35000cca253257210 ONLINE 0 0 0
35000cca2532572e4 ONLINE 0 0 0
35000cca2532586ec ONLINE 0 0 0
35000cca25325c5f4 ONLINE 0 0 0
35000cca25325c610 ONLINE 0 0 0
35000cca25325c76c ONLINE 0 0 0
35000cca25325fb38 ONLINE 0 0 0
35000cca25325fb5c ONLINE 0 0 0
35000cca25325fb78 ONLINE 0 0 0
special
mirror-6 ONLINE 0 0 0
zfs-64e840b01f4e178c ONLINE 0 0 0
zfs-2901cad643f112c3 ONLINE 0 0 0
zfs-8fa1031490ad0ab2 ONLINE 0 0 0
zfs-3cf59d1d145ab04b ONLINE 0 0 0
mirror-7 ONLINE 0 0 0
zfs-cc86d88f575882ba ONLINE 0 0 0
zfs-039251109fb434af ONLINE 0 0 0
zfs-e7c241bb7dcd4fb8 ONLINE 0 0 0
zfs-b13cf73ec91bb5d2 ONLINE 0 0 0
logs
mirror-4 ONLINE 0 0 0
zfs-b30c96b1eb59e20f ONLINE 0 0 0
zfs-0f8de84266666364 ONLINE 0 0 0
mirror-5 ONLINE 0 0 0
zfs-72606fae4de92cf2 ONLINE 0 0 0
zfs-0978488aae2a7c56 ONLINE 0 0 0
cache
nvme4n1p1 ONLINE 0 0 0
nvme5n1p1 ONLINE 0 0 0
errors: No known data errors
[root@zfs4 ~]# zpool status
pool: home3
state: ONLINE
scan: scrub in progress since Sat Oct 23 12:52:28 2021
39.1T scanned at 2.14G/s, 13.5T issued at 758M/s, 537T total
0B repaired, 2.52% done, 8 days 09:06:09 to go
config:
NAME STATE READ WRITE CKSUM
home3 ONLINE 0 0 0
raidz3-0 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531dd934 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531e2a94 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531e3ce8 ONLINE 0 0 0
wwn-0x5000cca2531e5f38 ONLINE 0 0 0
35000cca26fcec890 ONLINE 0 0 0
wwn-0x5000cca2531e5f6c ONLINE 0 0 0
dm-uuid-mpath-35000cca2531e6404 ONLINE 0 0 0
wwn-0x5000cca2531e84ec ONLINE 0 0 0
dm-uuid-mpath-35000cca2531e868c ONLINE 0 0 0
dm-uuid-mpath-35000cca2531e87d8 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531e9750 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531e9764 ONLINE 0 0 0
wwn-0x5000cca2531e9f48 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531eb96c ONLINE 0 0 0
dm-uuid-mpath-35000cca2531ec858 ONLINE 0 0 0
raidz3-1 ONLINE 0 0 0
wwn-0x5000cca2530aa110 ONLINE 0 0 0
wwn-0x5000cca2530aa424 ONLINE 0 0 0
wwn-0x5000cca2530aacb4 ONLINE 0 0 0
wwn-0x5000cca2530e297c ONLINE 0 0 0
dm-uuid-mpath-35000cca2530f661c ONLINE 0 0 0
dm-uuid-mpath-35000cca291abaed0 ONLINE 0 0 0
dm-uuid-mpath-35000cca253123c08 ONLINE 0 0 0
dm-uuid-mpath-35000cca253158878 ONLINE 0 0 0
dm-uuid-mpath-35000cca253168af4 ONLINE 0 0 0
dm-uuid-mpath-35000cca25316cc20 ONLINE 0 0 0
wwn-0x5000cca25316d614 ONLINE 0 0 0
dm-uuid-mpath-35000cca25316e978 ONLINE 0 0 0
dm-uuid-mpath-35000cca253178e50 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531bc948 ONLINE 0 0 0
dm-uuid-mpath-35000cca2531d7500 ONLINE 0 0 0
raidz3-2 ONLINE 0 0 0
wwn-0x5000cca2530a4188 ONLINE 0 0 0
wwn-0x5000cca2530a461c ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a4868 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a4918 ONLINE 0 0 0
wwn-0x5000cca2530a4bc4 ONLINE 0 0 0
wwn-0x5000cca2530a58a4 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a63f4 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a6adc ONLINE 0 0 0
wwn-0x5000cca2530a6f3c ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a7108 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a7130 ONLINE 0 0 0
wwn-0x5000cca2530a7288 ONLINE 0 0 0
wwn-0x5000cca2530a7408 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a7428 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a7494 ONLINE 0 0 0
raidz3-3 ONLINE 0 0 0
dm-uuid-mpath-35000cca253032d58 ONLINE 0 0 0
dm-uuid-mpath-35000cca253075af0 ONLINE 0 0 0
dm-uuid-mpath-35000cca253084ee4 ONLINE 0 0 0
wwn-0x5000cca253089a64 ONLINE 0 0 0
dm-uuid-mpath-35000cca25308a028 ONLINE 0 0 0
dm-uuid-mpath-35000cca253090cbc ONLINE 0 0 0
wwn-0x5000cca253091b9c ONLINE 0 0 0
dm-uuid-mpath-35000cca2530925bc ONLINE 0 0 0
dm-uuid-mpath-35000cca253093758 ONLINE 0 0 0
dm-uuid-mpath-35000cca25309e6d8 ONLINE 0 0 0
wwn-0x5000cca2530a0dd0 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a0f64 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a1140 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a21c4 ONLINE 0 0 0
dm-uuid-mpath-35000cca2530a23b4 ONLINE 0 0 0
special
mirror-4 ONLINE 0 0 0
zfs-0ef5386f750f66d9 ONLINE 0 0 0
zfs-a2c8272a11a49ac3 ONLINE 0 0 0
mirror-6 ONLINE 0 0 0
zfs-ef648f8d22311c93 ONLINE 0 0 0
zfs-9f637799f030adc6 ONLINE 0 0 0
logs
system-slog ONLINE 0 0 0
errors: No known data errors
If I had to guess what's happening...
Normally, it does try to print any special vdevs that it found, or to tell you when it knows there are more vdevs that it didn't find. But the latter wouldn't happen if reason was ZPOOL_STATUS_HOSTID_REQUIRED, which it had to be for you to get the MMP error in the first place. Maybe you could munge up the config after getting that back and retry the import with the multihost status fudged to be ignored?
As a first test I ran zgenhostid, but that didn't make a difference: the special devices are still not reported. I will take a look at setting up a non-production pool to see what happens with MMP disabled; a sketch of that test follows the output below.
[root@zfs3 ~]# zgenhostid
[root@zfs3 ~]# zpool import
pool: home1
id: 13906513988257520913
state: UNAVAIL
status: The pool is currently imported by another system.
action: The pool must be exported from cascade2 (hostid=80630521)
before it can be safely imported.
see: https://openzfs.github.io/openzfs-docs/msg/ZFS-8000-EY
config:
home1 UNAVAIL currently in use
raidz3-0 DEGRADED
sdgf ONLINE
sdie ONLINE
sdfs ONLINE
sdfw ONLINE
sdgu ONLINE
sdgn ONLINE
sdhc ONLINE
sdgk ONLINE
nvme-Micron_9300_MTFDHAL12T8TDR_21172EAC0D8A UNAVAIL
sdgb ONLINE
sdga ONLINE
sdgc ONLINE
sdgs ONLINE
sdgg ONLINE
sdhx ONLINE
raidz3-1 ONLINE
sdfq ONLINE
sdfp ONLINE
sdgv ONLINE
sdhf ONLINE
sdhg ONLINE
sdft ONLINE
sdgt ONLINE
sdhe ONLINE
sdfy ONLINE
sdge ONLINE
sdfx ONLINE
sdfv ONLINE
sdhd ONLINE
sdfr ONLINE
sdic ONLINE
raidz3-2 ONLINE
sdid ONLINE
sdib ONLINE
sdgl ONLINE
sdgx ONLINE
sdgr ONLINE
sdgi ONLINE
sdif ONLINE
sdgm ONLINE
sdgd ONLINE
sdhz ONLINE
sdgp ONLINE
sdgh ONLINE
sdhm ONLINE
sdfu ONLINE
sdhh ONLINE
raidz3-3 ONLINE
sdhj ONLINE
sdgy ONLINE
sdgq ONLINE
sdfz ONLINE
sdgj ONLINE
sdia ONLINE
sdha ONLINE
sdhn ONLINE
sdgo ONLINE
sdhi ONLINE
sdhk ONLINE
sdhy ONLINE
sdhl ONLINE
sdhb ONLINE
sdgz ONLINE
pool: home3
id: 12759373173601709758
state: UNAVAIL
status: The pool is currently imported by another system.
action: The pool must be exported from zfs4 (hostid=f67b1940)
before it can be safely imported.
see: https://openzfs.github.io/openzfs-docs/msg/ZFS-8000-EY
config:
home3 UNAVAIL currently in use
raidz3-0 ONLINE
sdan ONLINE
sdfe ONLINE
sdw ONLINE
wwn-0x5000cca2531e5f38 ONLINE
sdr ONLINE
wwn-0x5000cca2531e5f6c ONLINE
sdfh ONLINE
wwn-0x5000cca2531e84ec ONLINE
sdfi ONLINE
sdfa ONLINE
sdfg ONLINE
sdff ONLINE
wwn-0x5000cca2531e9f48 ONLINE
sdu ONLINE
sdev ONLINE
raidz3-1 ONLINE
wwn-0x5000cca2530aa110 ONLINE
wwn-0x5000cca2530aa424 ONLINE
wwn-0x5000cca2530aacb4 ONLINE
wwn-0x5000cca2530e297c ONLINE
sdeu ONLINE
sdex ONLINE
sder ONLINE
sdes ONLINE
sdl ONLINE
sdez ONLINE
wwn-0x5000cca25316d614 ONLINE
sdew ONLINE
sdeq ONLINE
sdt ONLINE
sdd ONLINE
raidz3-2 ONLINE
wwn-0x5000cca2530a4188 ONLINE
wwn-0x5000cca2530a461c ONLINE
sdfb ONLINE
sdo ONLINE
wwn-0x5000cca2530a4bc4 ONLINE
wwn-0x5000cca2530a58a4 ONLINE
sdm ONLINE
sdp ONLINE
wwn-0x5000cca2530a6f3c ONLINE
sdv ONLINE
sdx ONLINE
wwn-0x5000cca2530a7288 ONLINE
wwn-0x5000cca2530a7408 ONLINE
sdhp ONLINE
sddr ONLINE
raidz3-3 ONLINE
sdds ONLINE
sdn ONLINE
sdz ONLINE
wwn-0x5000cca253089a64 ONLINE
sdey ONLINE
sdet ONLINE
wwn-0x5000cca253091b9c ONLINE
sde ONLINE
sdq ONLINE
sddq ONLINE
wwn-0x5000cca2530a0dd0 ONLINE
sds ONLINE
sdf ONLINE
sdy ONLINE
sdfc ONLINE
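For reference, the non-production test I have in mind would be a small file-backed pool with a special mirror and multihost off, roughly like this (paths and pool name are just placeholders):
# create a throwaway pool with a special mirror and MMP disabled
truncate -s 1G /var/tmp/d1 /var/tmp/d2 /var/tmp/s1 /var/tmp/s2
zpool create -o multihost=off testpool \
    mirror /var/tmp/d1 /var/tmp/d2 \
    special mirror /var/tmp/s1 /var/tmp/s2
zpool export testpool
# hide one special-device file and see how zpool import reports the pool
mv /var/tmp/s2 /var/tmp/s2.hidden
zpool import -d /var/tmp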
Here is another example where the host zfs1 does not report inaccessible special devices from the home4 pool that is currently imported on another system (zfs2). Note that zpool import reports the pool as FAULTED even though it is state: ONLINE with no errors on the system where it is active. Perhaps the missing special devices are triggering the state: FAULTED? Or this may be the result of zfs2 running ZFS version 2.1.1 and zfs1 running version 2.0.5. Not sure what to test next, but I can export and re-import this home4 pool to run additional tests if that would be helpful; a rough outline of that test follows the output below.
[root@zfs1 ~]# zpool import
pool: home4
id: 7815774519246331192
state: FAULTED
status: The pool was last accessed by another system.
action: The pool cannot be imported due to damaged devices or data.
The pool may be active on another system, but can be imported using
the '-f' flag.
see: https://openzfs.github.io/openzfs-docs/msg/ZFS-8000-EY
config:
home4 FAULTED corrupted data
raidz3-0 ONLINE
dm-uuid-mpath-35000cca253077224 ONLINE
dm-uuid-mpath-35000cca253077640 ONLINE
dm-uuid-mpath-35000cca25308c90c ONLINE
dm-uuid-mpath-35000cca25308e49c ONLINE
dm-uuid-mpath-35000cca25308e95c ONLINE
dm-uuid-mpath-35000cca2530c2410 ONLINE
dm-uuid-mpath-35000cca2530ca5ac ONLINE
dm-uuid-mpath-35000cca2530e04d8 ONLINE
dm-uuid-mpath-35000cca2530e8598 ONLINE
dm-uuid-mpath-35000cca2530efc7c ONLINE
dm-uuid-mpath-35000cca2530f11bc ONLINE
dm-uuid-mpath-35000cca2530f2054 ONLINE
dm-uuid-mpath-35000cca2530f6a80 ONLINE
dm-uuid-mpath-35000cca2530f9c60 ONLINE
dm-uuid-mpath-35000cca25315aac4 ONLINE
raidz3-1 ONLINE
dm-uuid-mpath-35000cca253160470 ONLINE
dm-uuid-mpath-35000cca253169604 ONLINE
dm-uuid-mpath-35000cca25316d710 ONLINE
dm-uuid-mpath-35000cca25316d7a8 ONLINE
dm-uuid-mpath-35000cca25316de88 ONLINE
dm-uuid-mpath-35000cca25316e118 ONLINE
dm-uuid-mpath-35000cca25316e224 ONLINE
dm-uuid-mpath-35000cca2b007cbec ONLINE
dm-uuid-mpath-35000cca25316fc6c ONLINE
dm-uuid-mpath-35000cca25316fe54 ONLINE
dm-uuid-mpath-35000cca2b003dc68 ONLINE
dm-uuid-mpath-35000cca25317e204 ONLINE
dm-uuid-mpath-35000cca253187dbc ONLINE
dm-uuid-mpath-35000cca27066af18 ONLINE
dm-uuid-mpath-35000cca25318c0cc ONLINE
raidz3-2 ONLINE
dm-uuid-mpath-35000cca25318c288 ONLINE
dm-uuid-mpath-35000cca25319f120 ONLINE
dm-uuid-mpath-35000cca25319f538 ONLINE
dm-uuid-mpath-35000cca25319f670 ONLINE
dm-uuid-mpath-35000cca25319f6c4 ONLINE
dm-uuid-mpath-35000cca2531a4e5c ONLINE
dm-uuid-mpath-35000cca2531ae454 ONLINE
dm-uuid-mpath-35000cca2531bc110 ONLINE
dm-uuid-mpath-35000cca2531c0a14 ONLINE
dm-uuid-mpath-35000cca2531dd694 ONLINE
dm-uuid-mpath-35000cca2531dd8b4 ONLINE
dm-uuid-mpath-35000cca2531e3dac ONLINE
dm-uuid-mpath-35000cca2531e3e5c ONLINE
dm-uuid-mpath-35000cca2531e43f0 ONLINE
dm-uuid-mpath-35000cca2531e5edc ONLINE
raidz3-3 ONLINE
dm-uuid-mpath-35000cca2531e5eec ONLINE
dm-uuid-mpath-35000cca2531e62c4 ONLINE
dm-uuid-mpath-35000cca2531e6374 ONLINE
dm-uuid-mpath-35000cca2531e63cc ONLINE
dm-uuid-mpath-35000cca2531e8504 ONLINE
dm-uuid-mpath-35000cca2b0044a14 ONLINE
dm-uuid-mpath-35000cca2531e893c ONLINE
dm-uuid-mpath-35000cca2531e89dc ONLINE
dm-uuid-mpath-35000cca2531e89f8 ONLINE
dm-uuid-mpath-35000cca2531e9c24 ONLINE
dm-uuid-mpath-35000cca2531eb9d4 ONLINE
dm-uuid-mpath-35000cca2531ef3c0 ONLINE
dm-uuid-mpath-35000cca2531f0cf0 ONLINE
dm-uuid-mpath-35000cca270db8688 ONLINE
dm-uuid-mpath-35000cca291def410 ONLINE
[root@zfs2 ~]# zpool status
pool: home4
state: ONLINE
scan: scrub in progress since Sat Oct 23 11:21:23 2021
249T scanned at 3.29G/s, 239T issued at 3.15G/s, 249T total
0B repaired, 95.82% done, 00:56:26 to go
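For the record, the export/re-import test I could run would look roughly like this, assuming a maintenance window since home4 has to come offline:
# on zfs2, which currently has home4 imported
zpool export home4
# on zfs1 (2.0.5): is the pool still FAULTED once it is no longer active
# elsewhere, and does zpool import now list the special vdevs?
zpool import
zpool import home4
zpool status home4
zpool export home4
# then return the pool to zfs2
zpool import home4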
Describe the feature you would like to see added to OpenZFS
zpool import with no pool specification should report whether discovered pools with special devices could be imported.
How will this feature improve OpenZFS?
This would facilitate checking HA setups to confirm that all hosts can see all the necessary devices before they actually need to import a pool.
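As a stopgap, each standby host could be checked with something like the following sketch, where /etc/zfs/home1.devices is a hypothetical list of the pool's device paths (e.g. collected from zpool status -P on the active node):
#!/bin/sh
# verify that every expected device path is visible and carries a ZFS label
rc=0
while read -r dev; do
    if zdb -l "$dev" >/dev/null 2>&1; then
        echo "OK      $dev"
    else
        echo "MISSING $dev"
        rc=1
    fi
done < /etc/zfs/home1.devices
exit $rc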
Additional context
I think it would also be helpful to report on the accessibility of log and cache devices.
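For illustration only, a mocked-up sketch of how the tail of the zpool import config for home1 on zfs3 could look if the allocation classes were reported (device names are placeholders, not output from any current release):
special
mirror-6 UNAVAIL insufficient replicas
nvme0n1 UNAVAIL cannot open
nvme1n1 UNAVAIL cannot open
logs
mirror-4 UNAVAIL insufficient replicas
cache
nvme4n1p1 UNAVAIL cannot open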