Hello,
I just installed a fresh PVE 6.0 on two servers; the two are identical:
root@proxmox02:~# pveversion
pve-manager/6.0-2/865bbe32 (running kernel: 5.0.15-1-pve)
I installed with ZFS as the root filesystem:
root@proxmox02:~# zpool list
NAME             SIZE  ALLOC   FREE  CKPOINT  EXPANDSZ   FRAG    CAP  DEDUP  HEALTH  ALTROOT
rpool            236G  1.02G   235G        -         -     0%     0%  1.00x  ONLINE  -
stor-local-zfs   464G  1.38M   464G        -         -     0%     0%  1.00x  ONLINE  -
root@proxmox02:~# zfs list
NAME               USED  AVAIL  REFER  MOUNTPOINT
rpool             1.01G   228G   104K  /rpool
rpool/ROOT        1.01G   228G    96K  /rpool/ROOT
rpool/ROOT/pve-1  1.01G   228G  1.01G  /
rpool/data         112K   228G   112K  /rpool/data
stor-local-zfs    1.22M   449G    96K  /stor-local-zfs
When I set limits for the ZFS ARC, update the initramfs, and reboot, nothing changes. This was working on PVE 5.4; the difference is that the root partition was previously ext4 and is now ZFS.
root@proxmox02:~# cat /etc/modprobe.d/zfs.conf
# Minimum ZFS ARC : 512 MB
options zfs zfs_arc_min=536870912
# Maximum ZFS ARC : 4 GB
options zfs zfs_arc_max=4294967296
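For what it's worth, the same values can also be read (and zfs_arc_max even changed) at runtime through the module parameters under /sys, which avoids waiting for a reboot to cross-check. A quick sketch, assuming the ZFS 0.8.x parameter paths:

# Values the module is actually using right now (in bytes):
cat /sys/module/zfs/parameters/zfs_arc_min
cat /sys/module/zfs/parameters/zfs_arc_max
# zfs_arc_max accepts writes on a live system, e.g. 4 GiB:
echo 4294967296 > /sys/module/zfs/parameters/zfs_arc_max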
root@proxmox02:~# update-initramfs -u -k all
update-initramfs: Generating /boot/initrd.img-5.0.15-1-pve
root@proxmox02:~# reboot
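To check whether the options actually end up inside the generated image, something like Debian's lsinitramfs should work (the grep pattern is just my assumption about where the file lands inside the image):

lsinitramfs /boot/initrd.img-5.0.15-1-pve | grep zfs.conf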
root@proxmox02:~# grep c_ /proc/spl/kstat/zfs/arcstats
c_min                           4    1053766656
c_max                           4    16860266496
arc_no_grow                     4    0
arc_tempreserve                 4    0
arc_loaned_bytes                4    0
arc_prune                       4    0
arc_meta_used                   4    92992024
arc_meta_limit                  4    12645199872
arc_dnode_limit                 4    1264519987
arc_meta_max                    4    127125672
arc_meta_min                    4    16777216
async_upgrade_sync              4    58
arc_need_free                   4    0
arc_sys_free                    4    526883328
arc_raw_size                    4    0
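For comparison (my arithmetic, assuming the ZFS 0.8 defaults of 1/2 and 1/32 of RAM): c_max = 16860266496 B ≈ 15.7 GiB and c_min = 1053766656 B ≈ 1005 MiB, which are exactly the defaults for roughly 32 GiB of RAM, not the requested 4 GiB / 512 MB from zfs.conf. So the module options are apparently not being applied at boot.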
root@proxmox02:~# arc_summary
------------------------------------------------------------------------
ZFS Subsystem Report                             Mon Jul 15 00:36:26 2019
Linux 5.0.15-1-pve                                              0.8.1-pve1
Machine: proxmox02 (x86_64)                                     0.8.1-pve1

ARC status:                                                        HEALTHY
Memory throttle count:                                                   0

ARC size (current):                                     3.6 %    579.3 MiB
Target size (adaptive):                               100.0 %     15.7 GiB
Min size (hard limit):                                  6.2 %   1005.0 MiB
Max size (high water):                                   16:1     15.7 GiB
Most Frequently Used (MFU) cache size:                 32.0 %    180.3 MiB
Most Recently Used (MRU) cache size:                   68.0 %    383.3 MiB
Metadata cache size (hard limit):                      75.0 %     11.8 GiB
Metadata cache size (current):                          0.7 %     88.7 MiB
Dnode cache size (hard limit):                         10.0 %      1.2 GiB
Dnode cache size (current):                             0.6 %      6.9 MiB
Am I missing something?