Hello all.
I've just set up a cluster and am trying to move VMs around. The VM's disk is on my shared storage pool nfs1.
If anyone has any ideas on what I did wrong, please let me know.
Here are the details.
root@sm01:~# qm config 107
agent: 1
boot: c
bootdisk: scsi0
cipassword: **********
cores: 2
ipconfig0: ip=dhcp
memory: 2048
name: dvr
net0: virtio=06:7F:39C:60:28,bridge=vmbr0
numa: 0
onboot: 1
ostype: l26
scsi0: nfs1:107/vm-107-disk-0.qcow2,size=32972M
scsihw: virtio-scsi-pci
serial0: socket
smbios1: uuid=ac52b189-4718-4ea9-a5ec-98965476c7c8
sockets: 1
sshkeys: ssh-rsa%20AAAAB3NzaC1yc2EAAAADAQABAAABAQCnqFVxZn135ah6RJIKIopxoXFHmtWNufQml7lO%2BE4sxpNodaPJ2IZxnEzs%2FBJ5fdRWnI3eUASGiEJrPTzuDuySW7td2yN3Ft3YcPHZk8N%2Fj55pXSjO5%2BQJ1qU698K5x9f8d8zZkxGE6HrxdZkUFNLalW4kTYwupE2Fo2sseFWMPR5bTInkuKEd%2BV4Q1szuWgXMwPE5ibobuVlajmiiuHrYP%2B2k%2F6%2BQdmZoqUA4j61egxizjMwIManfCJ%2F8Mlq0rZO1Ft2EQuOfL3W2hPiy8YahTK1zXjcAs5RoAALZtj8wraNeAa0VaGxQk%2FnCAa1KNHHksWj3Y7cw6nKY3TsA%2BNGf%20imported-openssh-key
vga: serial0
vmgenid: a493aeda-90ba-448f-9e50-430b72d1ee8b
root@sm01:~# qm migrate 107 cdpve --online
2020-11-11 08:23:38 starting migration of VM 107 to node 'cdpve' (10.0.0.1)
zfs error: cannot open 'dpool': no such pool
zfs error: cannot open 'dpool': no such pool
2020-11-11 08:23:38 ERROR: Failed to sync data - could not activate storage 'dpool', zfs error: cannot open 'dpool': no such pool
2020-11-11 08:23:38 aborting phase 1 - cleanup resources
2020-11-11 08:23:38 ERROR: migration aborted (duration 00:00:00): Failed to sync data - could not activate storage 'dpool', zfs error: cannot open 'dpool': no such pool
migration aborted
root@sm01:~# pvesm status
zfs error: cannot open 'dpool': no such pool
zfs error: cannot open 'dpool': no such pool
could not activate storage 'dpool', zfs error: cannot open 'dpool': no such pool
zfs error: cannot open 'mail': no such pool
zfs error: cannot open 'mail': no such pool
could not activate storage 'mail', zfs error: cannot open 'mail': no such pool
Name Type Status Total Used Available %
dpool zfspool inactive 0 0 0 0.00%
dpool1 zfspool active 9442950520 432563752 9010386768 4.58%
local dir active 183902848 5723520 178179328 3.11%
local-zfs zfspool active 233931328 55751984 178179344 23.83%
mail zfspool inactive 0 0 0 0.00%
nfs1 nfs active 183903232 5723136 178180096 3.11%
root@sm01:~# pvecm status
Cluster information
-------------------
Name: bks1
Config Version: 3
Transport: knet
Secure auth: on
Quorum information
------------------
Date: Wed Nov 11 09:54:00 2020
Quorum provider: corosync_votequorum
Nodes: 2
Node ID: 0x00000002
Ring ID: 1.36
Quorate: Yes
Votequorum information
----------------------
Expected votes: 2
Highest expected: 2
Total votes: 2
Quorum: 2
Flags: Quorate
Membership information
----------------------
Nodeid Votes Name
0x00000001 1 10.0.0.1
0x00000002 1 10.0.0.2 (local)
root@sm01:~# cat /etc/pve/storage.cfg
dir: local
path /var/lib/vz
content vztmpl,iso,backup
zfspool: local-zfs
pool rpool/data
content images,rootdir
sparse 1
zfspool: dpool
pool dpool
content rootdir,images
sparse 0
zfspool: mail
pool mail
content rootdir,images
mountpoint /mail
sparse 1
nfs: nfs1
export /mnt/nfs1
path /mnt/pve/nfs1
server 10.2.2.21
content images,rootdir
options vers=4.2
zfspool: dpool1
pool dpool1
content images,rootdir
mountpoint /dpool1
sparse 0
I've just set up a cluster and am trying to move VMs around. The VM's disk is on my shared storage pool nfs1.
If anyone has any ideas on what I did wrong, please let me know.
Here are the details.
root@sm01:~# qm config 107
agent: 1
boot: c
bootdisk: scsi0
cipassword: **********
cores: 2
ipconfig0: ip=dhcp
memory: 2048
name: dvr
net0: virtio=06:7F:39C:60:28,bridge=vmbr0
numa: 0
onboot: 1
ostype: l26
scsi0: nfs1:107/vm-107-disk-0.qcow2,size=32972M
scsihw: virtio-scsi-pci
serial0: socket
smbios1: uuid=ac52b189-4718-4ea9-a5ec-98965476c7c8
sockets: 1
sshkeys: ssh-rsa%20AAAAB3NzaC1yc2EAAAADAQABAAABAQCnqFVxZn135ah6RJIKIopxoXFHmtWNufQml7lO%2BE4sxpNodaPJ2IZxnEzs%2FBJ5fdRWnI3eUASGiEJrPTzuDuySW7td2yN3Ft3YcPHZk8N%2Fj55pXSjO5%2BQJ1qU698K5x9f8d8zZkxGE6HrxdZkUFNLalW4kTYwupE2Fo2sseFWMPR5bTInkuKEd%2BV4Q1szuWgXMwPE5ibobuVlajmiiuHrYP%2B2k%2F6%2BQdmZoqUA4j61egxizjMwIManfCJ%2F8Mlq0rZO1Ft2EQuOfL3W2hPiy8YahTK1zXjcAs5RoAALZtj8wraNeAa0VaGxQk%2FnCAa1KNHHksWj3Y7cw6nKY3TsA%2BNGf%20imported-openssh-key
vga: serial0
vmgenid: a493aeda-90ba-448f-9e50-430b72d1ee8b
root@sm01:~# qm migrate 107 cdpve --online
2020-11-11 08:23:38 starting migration of VM 107 to node 'cdpve' (10.0.0.1)
zfs error: cannot open 'dpool': no such pool
zfs error: cannot open 'dpool': no such pool
2020-11-11 08:23:38 ERROR: Failed to sync data - could not activate storage 'dpool', zfs error: cannot open 'dpool': no such pool
2020-11-11 08:23:38 aborting phase 1 - cleanup resources
2020-11-11 08:23:38 ERROR: migration aborted (duration 00:00:00): Failed to sync data - could not activate storage 'dpool', zfs error: cannot open 'dpool': no such pool
migration aborted
root@sm01:~# pvesm status
zfs error: cannot open 'dpool': no such pool
zfs error: cannot open 'dpool': no such pool
could not activate storage 'dpool', zfs error: cannot open 'dpool': no such pool
zfs error: cannot open 'mail': no such pool
zfs error: cannot open 'mail': no such pool
could not activate storage 'mail', zfs error: cannot open 'mail': no such pool
Name Type Status Total Used Available %
dpool zfspool inactive 0 0 0 0.00%
dpool1 zfspool active 9442950520 432563752 9010386768 4.58%
local dir active 183902848 5723520 178179328 3.11%
local-zfs zfspool active 233931328 55751984 178179344 23.83%
mail zfspool inactive 0 0 0 0.00%
nfs1 nfs active 183903232 5723136 178180096 3.11%
root@sm01:~# pvecm status
Cluster information
-------------------
Name: bks1
Config Version: 3
Transport: knet
Secure auth: on
Quorum information
------------------
Date: Wed Nov 11 09:54:00 2020
Quorum provider: corosync_votequorum
Nodes: 2
Node ID: 0x00000002
Ring ID: 1.36
Quorate: Yes
Votequorum information
----------------------
Expected votes: 2
Highest expected: 2
Total votes: 2
Quorum: 2
Flags: Quorate
Membership information
----------------------
Nodeid Votes Name
0x00000001 1 10.0.0.1
0x00000002 1 10.0.0.2 (local)
root@sm01:~# cat /etc/pve/storage.cfg
dir: local
path /var/lib/vz
content vztmpl,iso,backup
zfspool: local-zfs
pool rpool/data
content images,rootdir
sparse 1
zfspool: dpool
pool dpool
content rootdir,images
sparse 0
zfspool: mail
pool mail
content rootdir,images
mountpoint /mail
sparse 1
nfs: nfs1
export /mnt/nfs1
path /mnt/pve/nfs1
server 10.2.2.21
content images,rootdir
options vers=4.2
zfspool: dpool1
pool dpool1
content images,rootdir
mountpoint /dpool1
sparse 0