A bit more information is needed.
Hi, it looks like the ZFS file system is using disk space for some reason — how do you reclaim it or defrag the disk?
zpool list -v
zpool status
root@thundercloud:~# zfs list -t all
NAME USED AVAIL REFER MOUNTPOINT
rpool 1.72T 41.2G 104K /rpool
rpool/ROOT 19.1G 41.2G 96K /rpool/ROOT
rpool/ROOT/pve-1 19.1G 41.2G 19.1G /
rpool/data 1.70T 41.2G 96K /rpool/data
rpool/data/subvol-100-disk-0 1.05G 10.9G 1.05G /rpool/data/subvol-100-disk-0
rpool/data/vm-102-disk-0 376K 41.2G 92K -
rpool/data/vm-102-disk-0@snap1 172K - 172K -
rpool/data/vm-102-disk-0@before24h2 112K - 112K -
rpool/data/vm-102-disk-1 447G 41.2G 243G -
rpool/data/vm-102-disk-1@snap1 149G - 219G -
rpool/data/vm-102-disk-1@before24h2 45.0G - 245G -
rpool/data/vm-102-disk-2 236K 41.2G 84K -
rpool/data/vm-102-disk-2@snap1 76K - 84K -
rpool/data/vm-102-disk-2@before24h2 76K - 84K -
rpool/data/vm-102-state-snap1 2.98G 41.2G 2.98G -
rpool/data/vm-103-disk-0 508K 41.2G 120K -
rpool/data/vm-103-disk-0@Snap1 72K - 124K -
rpool/data/vm-103-disk-0@Snap2 76K - 128K -
rpool/data/vm-103-disk-0@snap3 0B - 104K -
rpool/data/vm-103-disk-0@Sanp4 0B - 104K -
rpool/data/vm-103-disk-0@snap5 84K - 120K -
rpool/data/vm-103-disk-1 148G 41.2G 74.9G -
rpool/data/vm-103-disk-1@Snap1 2.84G - 17.4G -
rpool/data/vm-103-disk-1@Snap2 4.84G - 19.9G -
rpool/data/vm-103-disk-1@snap3 3.72G - 34.2G -
rpool/data/vm-103-disk-1@Sanp4 4.20G - 35.7G -
rpool/data/vm-103-disk-1@snap5 18.5G - 40.4G -
rpool/data/vm-103-disk-2 476K 41.2G 84K -
rpool/data/vm-103-disk-2@Snap1 68K - 68K -
rpool/data/vm-103-disk-2@Snap2 84K - 84K -
rpool/data/vm-103-disk-2@snap3 80K - 84K -
rpool/data/vm-103-disk-2@Sanp4 80K - 84K -
rpool/data/vm-103-disk-2@snap5 76K - 84K -
rpool/data/vm-103-state-Sanp4 4.83G 41.2G 4.83G -
rpool/data/vm-103-state-Snap2 3.88G 41.2G 3.88G -
rpool/data/vm-103-state-snap3 4.47G 41.2G 4.47G -
rpool/data/vm-103-state-snap5 4.11G 41.2G 4.11G -
rpool/data/vm-104-disk-0 304K 41.2G 132K -
rpool/data/vm-104-disk-0@beforetitustweaks 172K - 172K -
rpool/data/vm-104-disk-1 175G 41.2G 134G -
rpool/data/vm-104-disk-1@beforetitustweaks 41.0G - 135G -
rpool/data/vm-104-disk-2 160K 41.2G 84K -
rpool/data/vm-104-disk-2@beforetitustweaks 76K - 84K -
rpool/data/vm-106-disk-0 580K 41.2G 196K -
rpool/data/vm-106-disk-0@snap1 72K - 116K -
rpool/data/vm-106-disk-0@snap2 68K - 120K -
rpool/data/vm-106-disk-0@snap3 72K - 152K -
rpool/data/vm-106-disk-0@snap4 92K - 172K -
rpool/data/vm-106-disk-1 143G 41.2G 87.5G -
rpool/data/vm-106-disk-1@snap1 1.55G - 17.9G -
rpool/data/vm-106-disk-1@snap2 1.65G - 18.4G -
rpool/data/vm-106-disk-1@snap3 10.3G - 27.3G -
rpool/data/vm-106-disk-1@snap4 23.5G - 35.9G -
rpool/data/vm-106-disk-2 388K 41.2G 84K -
rpool/data/vm-106-disk-2@snap1 68K - 68K -
rpool/data/vm-106-disk-2@snap2 76K - 84K -
rpool/data/vm-106-disk-2@snap3 76K - 84K -
rpool/data/vm-106-disk-2@snap4 76K - 84K -
rpool/data/vm-106-state-snap2 4.94G 41.2G 4.94G -
rpool/data/vm-106-state-snap4 4.94G 41.2G 4.94G -
rpool/data/vm-111-disk-0 148K 41.2G 148K -
rpool/data/vm-111-disk-1 67.8G 41.2G 67.8G -
rpool/data/vm-111-disk-2 68K 41.2G 68K -
rpool/data/vm-113-disk-0 112K 41.2G 112K -
rpool/data/vm-113-disk-1 6.54G 41.2G 6.54G -
rpool/data/vm-114-disk-0 284K 41.2G 152K -
rpool/data/vm-114-disk-0@beforeportmaster 132K - 132K -
rpool/data/vm-114-disk-1 110G 41.2G 61.2G -
rpool/data/vm-114-disk-1@beforeportmaster 48.6G - 59.0G -
rpool/data/vm-114-disk-2 132K 41.2G 68K -
rpool/data/vm-114-disk-2@beforeportmaster 64K - 68K -
rpool/data/vm-114-state-beforeportmaster 3.34G 41.2G 3.34G -
rpool/data/vm-115-disk-0 448K 41.2G 104K -
rpool/data/vm-115-disk-0@beforetweaks 116K - 116K -
rpool/data/vm-115-disk-0@Redwood 136K - 136K -
rpool/data/vm-115-disk-0@before24h2 92K - 92K -
rpool/data/vm-115-disk-1 125G 41.2G 67.4G -
rpool/data/vm-115-disk-1@beforetweaks 8.99G - 15.6G -
rpool/data/vm-115-disk-1@Redwood 10.6G - 36.6G -
rpool/data/vm-115-disk-1@before24h2 12.4G - 37.1G -
rpool/data/vm-115-disk-2 268K 41.2G 68K -
rpool/data/vm-115-disk-2@beforetweaks 68K - 68K -
rpool/data/vm-115-disk-2@Redwood 68K - 68K -
rpool/data/vm-115-disk-2@before24h2 64K - 68K -
rpool/data/vm-115-state-Redwood 3.85G 41.2G 3.85G -
rpool/data/vm-115-state-before24h2 3.63G 41.2G 3.63G -
rpool/data/vm-115-state-beforetweaks 3.11G 41.2G 3.11G -
rpool/data/vm-117-disk-0 140K 41.2G 140K -
rpool/data/vm-117-disk-1 50.4G 41.2G 50.4G -
rpool/data/vm-117-disk-2 64K 41.2G 64K -
rpool/data/vm-118-disk-0 444K 41.2G 124K -
rpool/data/vm-118-disk-0@firstboot 96K - 96K -
rpool/data/vm-118-disk-0@christiuswiniso 132K - 132K -
rpool/data/vm-118-disk-0@before24h2 92K - 92K -
rpool/data/vm-118-disk-1 164G 41.2G 103G -
rpool/data/vm-118-disk-1@firstboot 6.54G - 8.58G -
rpool/data/vm-118-disk-1@christiuswiniso 24.3G - 42.6G -
rpool/data/vm-118-disk-1@before24h2 24.0G - 95.6G -
rpool/data/vm-118-disk-2 268K 41.2G 68K -
rpool/data/vm-118-disk-2@firstboot 68K - 68K -
rpool/data/vm-118-disk-2@christiuswiniso 68K - 68K -
rpool/data/vm-118-disk-2@before24h2 64K - 68K -
rpool/data/vm-118-state-christiuswiniso 5.01G 41.2G 5.01G -
rpool/data/vm-118-state-firstboot 1.83G 41.2G 1.83G -
rpool/data/vm-119-disk-0 568K 41.2G 124K -
rpool/data/vm-119-disk-0@zoom 80K - 136K -
rpool/data/vm-119-disk-0@twodays2go 84K - 140K -
rpool/data/vm-119-disk-0@IOT24H2Update 80K - 128K -
rpool/data/vm-119-disk-0@BeforeWinPro 96K - 144K -
rpool/data/vm-119-disk-1 157G 41.2G 88.7G -
rpool/data/vm-119-disk-1@zoom 6.09G - 31.4G -
rpool/data/vm-119-disk-1@twodays2go 6.38G - 31.9G -
rpool/data/vm-119-disk-1@IOT24H2Update 10.8G - 89.4G -
rpool/data/vm-119-disk-1@BeforeWinPro 9.54G - 89.6G -
rpool/data/vm-119-disk-2 328K 41.2G 68K -
rpool/data/vm-119-disk-2@zoom 68K - 68K -
rpool/data/vm-119-disk-2@twodays2go 64K - 68K -
rpool/data/vm-119-disk-2@IOT24H2Update 64K - 68K -
rpool/data/vm-119-disk-2@BeforeWinPro 64K - 68K -
rpool/data/vm-119-state-twodays2go 5.25G 41.2G 5.25G -
rpool/data/vm-119-state-zoom 2.19G 41.2G 2.19G -
rpool/data/vm-200-disk-0 116K 41.2G 116K -
rpool/data/vm-200-disk-1 62.1G 41.2G 62.1G -
rpool/data/vm-200-disk-2 68K 41.2G 68K -
rpool/data/vm-201-disk-0 136K 41.2G 136K -
rpool/data/vm-201-disk-1 20.7G 41.2G 20.7G -
rpool/data/vm-201-disk-2 68K 41.2G 68K -
Exactly. The default is:
They are local backups — Servername -> Backups (*.vma.zst)
/var/lib/vz/dump
There was swap in ZFS years ago. The default today is “no swap”. You can see whether swap is used in any form with:
Does ZFS use disk space as memory? A kind of swap file?
swapon -s
So there is no swap file in use.
What I noticed when looking over it is that well over 500 GB are taken up by snapshots alone. Surely some of them can be deleted.
When changes are made in a VM, the snapshot also grows. That would explain the increase.
Exactly. The default is /var/lib/vz/dump
There was a swap in ZFS years ago. The default today is “no swap”. You can see whether swap is used in any form with
Code:swapon -s
Ok, I'll ask a bit differently... is there even a configured backup job? Please send me the output from:
I was in the right location when looking for the backups -> /var/lib/vz/dump
The backups are not showing there, though. The backups in the attached image are the ones that I am trying to find.
Unless I need to mount this location? Not sure how to do that, though.
cat /etc/pve/jobs.cfg
cat /etc/pve/storage.cfg