一、查看硬盘信息
[root@localhost ~]# lspci |grep -i "non-vol"
d9:00.0 Non-Volatile memory controller: Intel Corporation NVMe Datacenter SSD [3DNAND, Beta Rock Controller]
da:00.0 Non-Volatile memory controller: Intel Corporation NVMe Datacenter SSD [3DNAND, Beta Rock Controller]
[root@localhost ~]# lsblk
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
sda 8:0 0 1.9T 0 disk
sdb 8:16 0 287.7G 0 disk
sdc 8:32 0 59.5G 0 disk
├─sdc1 8:33 0 200M 0 part /boot/efi
├─sdc2 8:34 0 1G 0 part /boot
└─sdc3 8:35 0 58.3G 0 part
├─rhel-root 253:0 0 36.5G 0 lvm /
├─rhel-swap 253:1 0 4G 0 lvm [SWAP]
└─rhel-home 253:2 0 17.8G 0 lvm /home
nvme0n1 259:1 0 1.8T 0 disk
nvme1n1 259:0 0 1.8T 0 disk
二、创建raid
[root@localhost ~]# mdadm -C /dev/md0 --force --level=raid1 --bitmap=internal --raid-devices=2 --assume-clean /dev/nvme0n1 /dev/nvme1n1
mdadm: partition table exists on /dev/nvme0n1
mdadm: partition table exists on /dev/nvme0n1 but will be lost or
meaningless after creating array
mdadm: Note: this array has metadata at the start and
may not be suitable as a boot device. If you plan to
store '/boot' on this device please ensure that
your boot-loader understands md/v1.x metadata, or use
--metadata=0.90
Continue creating array? (y/n) y
mdadm: Defaulting to version 1.2 metadata
mdadm: array /dev/md0 started.
[root@localhost ~]# cat /proc/mdstat
Personalities : [raid1]
md0 : active raid1 nvme1n1[1] nvme0n1[0]
1953382464 blocks super 1.2 [2/2] [UU]
bitmap: 0/15 pages [0KB], 65536KB chunk
unused devices: <none>
[root@localhost ~]# mdadm --detail /dev/md0
/dev/md0:
Version : 1.2
Creation Time : Fri Mar 10 10:42:01 2023
Raid Level : raid1
Array Size : 1953382464 (1862.89 GiB 2000.26 GB)
Used Dev Size : 1953382464 (1862.89 GiB 2000.26 GB)
Raid Devices : 2
Total Devices : 2
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Fri Mar 10 10:42:01 2023
State : clean
Active Devices : 2
Working Devices : 2
Failed Devices : 0
Spare Devices : 0
Consistency Policy : bitmap
Name : localhost.localdomain:0 (local to host localhost.localdomain)
UUID : 46813b05:e55c293c:9d0b5435:e56c64be
Events : 0
Number Major Minor RaidDevice State
0 259 1 0 active sync /dev/nvme0n1
1 259 0 1 active sync /dev/nvme1n1
三、raid剔除nvme硬盘
[root@localhost ~]# mdadm --manage /dev/md0 --fail /dev/nvme0n1
mdadm: set /dev/nvme0n1 faulty in /dev/md0
注:将硬盘标记为 faulty 后,建议再执行 mdadm --manage /dev/md0 --remove /dev/nvme0n1,把故障盘从阵列成员中正式移除,然后再进行下一步的热插拔断电操作。
[root@localhost ~]# cat /proc/mdstat
Personalities : [raid1]
md0 : active raid1 nvme1n1[1] nvme0n1[0](F)
1953382464 blocks super 1.2 [2/1] [_U]
bitmap: 0/15 pages [0KB], 65536KB chunk
unused devices: <none>
[root@localhost ~]# mdadm --detail /dev/md0
/dev/md0:
Version : 1.2
Creation Time : Fri Mar 10 10:42:01 2023
Raid Level : raid1
Array Size : 1953382464 (1862.89 GiB 2000.26 GB)
Used Dev Size : 1953382464 (1862.89 GiB 2000.26 GB)
Raid Devices : 2
Total Devices : 2
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Fri Mar 10 10:43:46 2023
State : clean, degraded
Active Devices : 1
Working Devices : 1
Failed Devices : 1
Spare Devices : 0
Consistency Policy : bitmap
Name : localhost.localdomain:0 (local to host localhost.localdomain)
UUID : 46813b05:e55c293c:9d0b5435:e56c64be
Events : 2
Number Major Minor RaidDevice State
- 0 0 0 removed
1 259 0 1 active sync /dev/nvme1n1
0 259 1 - faulty /dev/nvme0n1
四、OS剔除nvme硬盘
[root@localhost ~]# lsblk
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
sda 8:0 0 1.9T 0 disk
sdb 8:16 0 287.7G 0 disk
sdc 8:32 0 59.5G 0 disk
├─sdc1 8:33 0 200M 0 part /boot/efi
├─sdc2 8:34 0 1G 0 part /boot
└─sdc3 8:35 0 58.3G 0 part
├─rhel-root 253:0 0 36.5G 0 lvm /
├─rhel-swap 253:1 0 4G 0 lvm [SWAP]
└─rhel-home 253:2 0 17.8G 0 lvm /home
nvme0n1 259:1 0 1.8T 0 disk
└─md0 9:0 0 1.8T 0 raid1
nvme1n1 259:0 0 1.8T 0 disk
└─md0 9:0 0 1.8T 0 raid1
[root@localhost ~]# find /sys/devices|egrep 'nvme[0-9]?$'
/sys/devices/pci0000:d7/0000:d7:01.0/0000:d9:00.0/nvme
/sys/devices/pci0000:d7/0000:d7:01.0/0000:d9:00.0/nvme/nvme0
/sys/devices/pci0000:d7/0000:d7:02.0/0000:da:00.0/nvme
/sys/devices/pci0000:d7/0000:d7:02.0/0000:da:00.0/nvme/nvme1
[root@localhost ~]# udevadm info /dev/nvme0n1 |grep "P:"
P: /devices/pci0000:d7/0000:d7:01.0/0000:d9:00.0/nvme/nvme0/nvme0n1
[root@localhost ~]# udevadm info /dev/nvme1n1 |grep "P:"
P: /devices/pci0000:d7/0000:d7:02.0/0000:da:00.0/nvme/nvme1/nvme1n1
[root@localhost ~]# lspci -s d9:00.0 -v |grep -i slot
Physical Slot: 69
[root@localhost ~]# lspci -s da:00.0 -v |grep -i slot
Physical Slot: 68
[root@localhost ~]# echo 0 >/sys/bus/pci/slots/69/power
[root@localhost ~]# lspci |grep -i "non-vol"
da:00.0 Non-Volatile memory controller: Intel Corporation NVMe Datacenter SSD [3DNAND, Beta Rock Controller]
[root@localhost ~]# lsblk
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
sda 8:0 0 1.9T 0 disk
sdb 8:16 0 287.7G 0 disk
sdc 8:32 0 59.5G 0 disk
├─sdc1 8:33 0 200M 0 part /boot/efi
├─sdc2 8:34 0 1G 0 part /boot
└─sdc3 8:35 0 58.3G 0 part
├─rhel-root 253:0 0 36.5G 0 lvm /
├─rhel-swap 253:1 0 4G 0 lvm [SWAP]
└─rhel-home 253:2 0 17.8G 0 lvm /home
nvme1n1 259:0 0 1.8T 0 disk
└─md0 9:0 0 1.8T 0 raid1
[root@localhost ~]# mdadm --detail /dev/md0
/dev/md0:
Version : 1.2
Creation Time : Fri Mar 10 10:42:01 2023
Raid Level : raid1
Array Size : 1953382464 (1862.89 GiB 2000.26 GB)
Used Dev Size : 1953382464 (1862.89 GiB 2000.26 GB)
Raid Devices : 2
Total Devices : 1
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Fri Mar 10 10:43:46 2023
State : clean, degraded
Active Devices : 1
Working Devices : 1
Failed Devices : 0
Spare Devices : 0
Consistency Policy : bitmap
Name : localhost.localdomain:0 (local to host localhost.localdomain)
UUID : 46813b05:e55c293c:9d0b5435:e56c64be
Events : 2
Number Major Minor RaidDevice State
- 0 0 0 removed
1 259 0 1 active sync /dev/nvme1n1
五、物理更换硬盘
六、OS重新扫描nvme硬盘
[root@localhost ~]# echo 1 >/sys/bus/pci/slots/69/power
[root@localhost ~]# lspci |grep -i "non-vol"
d9:00.0 Non-Volatile memory controller: Intel Corporation NVMe Datacenter SSD [3DNAND, Beta Rock Controller]
da:00.0 Non-Volatile memory controller: Intel Corporation NVMe Datacenter SSD [3DNAND, Beta Rock Controller]
[root@localhost ~]# lsblk
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
sda 8:0 0 1.9T 0 disk
sdb 8:16 0 287.7G 0 disk
sdc 8:32 0 59.5G 0 disk
├─sdc1 8:33 0 200M 0 part /boot/efi
├─sdc2 8:34 0 1G 0 part /boot
└─sdc3 8:35 0 58.3G 0 part
├─rhel-root 253:0 0 36.5G 0 lvm /
├─rhel-swap 253:1 0 4G 0 lvm [SWAP]
└─rhel-home 253:2 0 17.8G 0 lvm /home
nvme0n1 259:1 0 1.8T 0 disk
nvme1n1 259:0 0 1.8T 0 disk
└─md0 9:0 0 1.8T 0 raid1
七、Raid重新添加新nvme硬盘
[root@localhost ~]# mdadm --manage /dev/md0 --add /dev/nvme0n1
mdadm: re-added /dev/nvme0n1
注:此处提示 re-added 且阵列立即恢复为 [UU]、无需完整同步,是因为插回的硬盘上仍保留原有的 md 超级块和 write-intent bitmap 信息(仅回放位图中记录的差异块)。如果更换的是一块全新硬盘,提示将是 added,并会触发完整重建(resync),可通过 cat /proc/mdstat 观察重建进度。
[root@localhost ~]# cat /proc/mdstat
Personalities : [raid1]
md0 : active raid1 nvme0n1[0] nvme1n1[1]
1953382464 blocks super 1.2 [2/2] [UU]
bitmap: 0/15 pages [0KB], 65536KB chunk
unused devices: <none>
[root@localhost ~]# mdadm --detail /dev/md0
/dev/md0:
Version : 1.2
Creation Time : Fri Mar 10 10:42:01 2023
Raid Level : raid1
Array Size : 1953382464 (1862.89 GiB 2000.26 GB)
Used Dev Size : 1953382464 (1862.89 GiB 2000.26 GB)
Raid Devices : 2
Total Devices : 2
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Fri Mar 10 10:51:12 2023
State : clean
Active Devices : 2
Working Devices : 2
Failed Devices : 0
Spare Devices : 0
Consistency Policy : bitmap
Name : localhost.localdomain:0 (local to host localhost.localdomain)
UUID : 46813b05:e55c293c:9d0b5435:e56c64be
Events : 7
Number Major Minor RaidDevice State
0 259 1 0 active sync /dev/nvme0n1
1 259 0 1 active sync /dev/nvme1n1