Hello,
for two days now I have been having problems with backups to a PBS:
Warning: unable to close filehandle GEN73 properly: No space left on device at /usr/share/perl5/PVE/VZDump/QemuServer.pm line 754.
In my opinion there should be plenty of free space, shouldn't there?
What is no longer needed are the snapshots; I would delete them, if that is okay, with something like:
for snapshot in $(zfs list -H -o name -t snapshot); do zfs destroy "$snapshot"; done
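To be on the safe side I would do a dry run first; as far as I know zfs destroy supports -n (no-op) and -v (verbose) for exactly that:

Code:
# Dry run: prints what would be destroyed without deleting anything
for snapshot in $(zfs list -H -o name -t snapshot); do
    zfs destroy -nv "$snapshot"
done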
Best regards
Code:
root@pve:~# df -h
Filesystem                     Size  Used Avail Use% Mounted on
udev                            63G     0   63G   0% /dev
tmpfs                           13G  834M   12G   7% /run
rpool/ROOT/pve-1               2.0T  1.4G  2.0T   1% /
tmpfs                           63G   43M   63G   1% /dev/shm
tmpfs                          5.0M     0  5.0M   0% /run/lock
tmpfs                           63G     0   63G   0% /sys/fs/cgroup
rpool                          2.0T  256K  2.0T   1% /rpool
rpool/data                     2.0T  256K  2.0T   1% /rpool/data
rpool/ROOT                     2.0T  256K  2.0T   1% /rpool/ROOT
/dev/fuse                       30M   24K   30M   1% /etc/pve
192.168.100.2:/volume1/backup  2.7T  1.8T  862G  68% /mnt/pve/NAS-Backup
rpool/data/backup              2.0T  256K  2.0T   1% /rpool/data/backup
tmpfs                           13G     0   13G   0% /run/user/0
root@pve:~# du -hs * | sort -h
512 status.cfg
26K backup
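In case the per-dataset numbers are misleading, I can also post the pool-level view (zpool list and zpool status are standard commands):

Code:
# Overall pool capacity, fragmentation and health
zpool list rpool
zpool status rpool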
Code:
root@pve:~# pveversion -v
proxmox-ve: 6.3-1 (running kernel: 5.4.73-1-pve)
pve-manager: 6.3-2 (running version: 6.3-2/22f57405)
pve-kernel-5.4: 6.3-1
pve-kernel-helper: 6.3-1
pve-kernel-5.4.73-1-pve: 5.4.73-1
ceph-fuse: 12.2.11+dfsg1-2.1+b1
corosync: 3.0.4-pve1
criu: 3.11-3
glusterfs-client: 5.5-3
ifupdown: 0.8.35+pve1
ksm-control-daemon: 1.3-1
libjs-extjs: 6.0.1-10
libknet1: 1.16-pve1
libproxmox-acme-perl: 1.0.5
libproxmox-backup-qemu0: 1.0.2-1
libpve-access-control: 6.1-3
libpve-apiclient-perl: 3.0-3
libpve-common-perl: 6.2-6
libpve-guest-common-perl: 3.1-3
libpve-http-server-perl: 3.0-6
libpve-storage-perl: 6.3-1
libqb0: 1.0.5-1
libspice-server1: 0.14.2-4~pve6+1
lvm2: 2.03.02-pve4
lxc-pve: 4.0.3-1
lxcfs: 4.0.3-pve3
novnc-pve: 1.1.0-1
proxmox-backup-client: 1.0.5-1
proxmox-mini-journalreader: 1.1-1
proxmox-widget-toolkit: 2.4-3
pve-cluster: 6.2-1
pve-container: 3.3-1
pve-docs: 6.3-1
pve-edk2-firmware: 2.20200531-1
pve-firewall: 4.1-3
pve-firmware: 3.1-3
pve-ha-manager: 3.1-1
pve-i18n: 2.2-2
pve-qemu-kvm: 5.1.0-7
pve-xtermjs: 4.7.0-3
qemu-server: 6.3-1
smartmontools: 7.1-pve2
spiceterm: 3.1-1
vncterm: 1.6-2
zfsutils-linux: 0.8.5-pve1
root@pve:~#
Code:
root@pve:~# df -ih
Filesystem                    Inodes IUsed IFree IUse% Mounted on
udev                             16M   708   16M    1% /dev
tmpfs                            16M  1.2K   16M    1% /run
rpool/ROOT/pve-1                4.0G   51K  4.0G    1% /
tmpfs                            16M    99   16M    1% /dev/shm
tmpfs                            16M    19   16M    1% /run/lock
tmpfs                            16M    18   16M    1% /sys/fs/cgroup
rpool                           4.0G     8  4.0G    1% /rpool
rpool/data                      4.0G     7  4.0G    1% /rpool/data
rpool/ROOT                      4.0G     7  4.0G    1% /rpool/ROOT
/dev/fuse                       9.8K    36  9.8K    1% /etc/pve
192.168.100.2:/volume1/backup      0     0     0     - /mnt/pve/NAS-Backup
rpool/data/backup               4.0G     6  4.0G    1% /rpool/data/backup
tmpfs                            16M    10   16M    1% /run/user/0
root@pve:~#
Code:
root@pve:~# ls -lh
total 1.0K
drwxr-xr-x 3 root root 3 Nov 30 2020 backup
-rw-r--r-- 1 root root 0 Jan 29 2021 status.cfg
root@pve:~#
Code:
root@pve:~# ls -lh /var/log/pveproxy
total 1.2M
-rw-r----- 1 www-data www-data 112K Mar 17 06:48 access.log
-rw-r----- 1 www-data www-data 139K Mar 16 06:50 access.log.1
-rw-r----- 1 www-data www-data 27K Mar 15 10:31 access.log.2.gz
-rw-r----- 1 www-data www-data 203K Mar 13 11:37 access.log.3.gz
-rw-r----- 1 www-data www-data 65K Mar 12 12:15 access.log.4.gz
-rw-r----- 1 www-data www-data 3.0K Mar 11 20:30 access.log.5.gz
-rw-r----- 1 www-data www-data 462K Mar 10 18:48 access.log.6.gz
-rw-r----- 1 www-data www-data 288K Mar 10 00:00 access.log.7.gz
root@pve:~#
Code:
root@pve:~# zfs list
NAME                                  USED  AVAIL  REFER  MOUNTPOINT
rpool                                2.91T  1.99T   151K  /rpool
rpool/ROOT                           1.38G  1.99T   140K  /rpool/ROOT
rpool/ROOT/pve-1                     1.38G  1.99T  1.38G  /
rpool/data                           2.91T  1.99T   140K  /rpool/data
rpool/data/backup                     140K  1.99T   140K  /rpool/data/backup
rpool/data/save                       591G  1.99T   591G  -
rpool/data/vm-100-disk-0             81.4K  1.99T  81.4K  -
rpool/data/vm-100-disk-1             73.0G  1.99T  34.3G  -
rpool/data/vm-100-state-aa           6.97G  1.99T  6.97G  -
rpool/data/vm-100-state-heute        7.75G  1.99T  7.75G  -
rpool/data/vm-100-state-mit_DC       3.24G  1.99T  3.24G  -
rpool/data/vm-100-state-mit_updates   969M  1.99T   969M  -
rpool/data/vm-100-state-sauber       1.56G  1.99T  1.56G  -
rpool/data/vm-100-state-save         7.49G  1.99T  7.49G  -
rpool/data/vm-101-disk-0             1.11T  1.99T   950G  -
rpool/data/vm-101-disk-1              603G  1.99T   597G  -
rpool/data/vm-101-state-aa           75.7G  1.99T  75.7G  -
rpool/data/vm-101-state-sauber       5.27G  1.99T  5.27G  -
rpool/data/vm-101-state-vorupdate    77.6G  1.99T  77.6G  -
rpool/data/vm-102-disk-0             66.6G  1.99T  45.1G  -
rpool/data/vm-102-disk-1              320G  1.99T   320G  -
root@pve:~#
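To see how much of the USED column is actually tied up in snapshots, I would also look at the per-dataset space breakdown (zfs list -o space is a standard shorthand):

Code:
# Splits USED into usedbysnapshots, usedbydataset and usedbychildren
zfs list -o space -r rpool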
Code:
root@pve:~# zfs list -t snapshot
NAME                                                  USED  AVAIL  REFER  MOUNTPOINT
rpool/data/save@rep_101_2022-03-09_09:00:01             0B      -   591G  -
rpool/data/vm-100-disk-1@sauber                      8.57G      -  22.5G  -
rpool/data/vm-100-disk-1@mit_updates                  730M      -  21.3G  -
rpool/data/vm-100-disk-1@mit_DC                       988M      -  21.2G  -
rpool/data/vm-100-disk-1@save                        3.28G      -  21.3G  -
rpool/data/vm-100-disk-1@rep_100_2021-06-22_10:15:01 19.9M      -  21.2G  -
rpool/data/vm-100-disk-1@rep_100_2021-06-22_10:30:25 19.3M      -  21.2G  -
rpool/data/vm-100-disk-1@rep_100_2021-06-25_11:45:58  723M      -  21.1G  -
rpool/data/vm-100-disk-1@heute                       5.12G      -  22.5G  -
rpool/data/vm-100-disk-1@aa                          5.08G      -  31.5G  -
rpool/data/vm-100-disk-1@rep_100_2022-03-13_02:00:03  674M      -  33.5G  -
rpool/data/vm-100-disk-1@rep_100_2022-03-14_02:00:04  367M      -  33.5G  -
rpool/data/vm-100-disk-1@rep_100_2022-03-15_02:00:01  295M      -  33.5G  -
rpool/data/vm-100-disk-1@rep_100_2022-03-16_02:00:03  256M      -  34.4G  -
rpool/data/vm-100-disk-1@rep_100_2022-03-17_02:00:03 98.4M      -  34.3G  -
rpool/data/vm-101-disk-0@sauber                      3.53G      -  21.4G  -
rpool/data/vm-101-disk-0@test                        2.85G      -  22.3G  -
rpool/data/vm-101-disk-0@vorupdate                   38.7G      -   943G  -
rpool/data/vm-101-disk-0@rep_101_2022-02-01_11:30:01 8.23G      -   943G  -
rpool/data/vm-101-disk-0@aa                          6.20G      -   950G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-08_11:15:01 51.7M      -   949G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-08_11:30:01 47.6M      -   948G  -
rpool/data/vm-101-disk-0@13_44                       1.76G      -   947G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-10_08:45:01 1.06G      -   948G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-10_15:30:24  183M      -   949G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-10_15:45:20  131M      -   949G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-10_16:00:15  141M      -   949G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-10_16:15:05  144M      -   949G  -
rpool/data/vm-101-disk-0@rep_101_2022-03-10_16:30:01  314M      -   949G  -
rpool/data/vm-101-disk-1@test                         639K      -   884K  -
rpool/data/vm-101-disk-1@rep_101_2022-01-10_13:30:01 1.42G      -   576G  -
rpool/data/vm-101-disk-1@vorupdate                    771M      -   577G  -
rpool/data/vm-101-disk-1@aa                           912M      -   585G  -
rpool/data/vm-101-disk-1@rep_101_2022-03-08_11:15:01  424K      -   591G  -
rpool/data/vm-101-disk-1@rep_101_2022-03-08_11:30:01  424K      -   591G  -
rpool/data/vm-101-disk-1@rep_101_2022-03-10_15:30:24    0B      -   591G  -
rpool/data/vm-101-disk-1@rep_101_2022-03-10_15:45:20    0B      -   591G  -
rpool/data/vm-101-disk-1@rep_101_2022-03-10_16:00:15    0B      -   591G  -
rpool/data/vm-101-disk-1@rep_101_2022-03-10_16:15:05    0B      -   591G  -
rpool/data/vm-101-disk-1@rep_101_2022-03-10_16:30:01    0B      -   591G  -
rpool/data/vm-102-disk-0@rep_102_2021-06-22_10:15:18 9.42M      -  28.9G  -
rpool/data/vm-102-disk-0@rep_102_2021-06-22_10:30:16 8.98M      -  28.9G  -
rpool/data/vm-102-disk-0@rep_102_2021-06-24_10:00:01  953M      -  28.9G  -
rpool/data/vm-102-disk-0@rep_102_2022-03-13_01:00:07 1.15G      -  44.8G  -
rpool/data/vm-102-disk-0@rep_102_2022-03-14_01:00:03  515M      -  44.7G  -
rpool/data/vm-102-disk-0@rep_102_2022-03-15_01:00:03  553M      -  44.8G  -
rpool/data/vm-102-disk-0@rep_102_2022-03-16_01:00:03  520M      -  45.0G  -
rpool/data/vm-102-disk-0@rep_102_2022-03-17_01:00:03  181M      -  45.1G  -
rpool/data/vm-102-disk-1@rep_102_2021-06-21_19:45:04 47.5M      -   179G  -
rpool/data/vm-102-disk-1@rep_102_2021-06-22_10:15:18  401K      -   179G  -
rpool/data/vm-102-disk-1@rep_102_2021-06-22_10:30:16  401K      -   179G  -
rpool/data/vm-102-disk-1@rep_102_2022-03-13_01:00:07  820K      -   320G  -
rpool/data/vm-102-disk-1@rep_102_2022-03-14_01:00:03  541K      -   320G  -
rpool/data/vm-102-disk-1@rep_102_2022-03-15_01:00:03  535K      -   320G  -
rpool/data/vm-102-disk-1@rep_102_2022-03-16_01:00:03  541K      -   320G  -
rpool/data/vm-102-disk-1@rep_102_2022-03-17_01:00:03  529K      -   320G  -
root@pve:~#
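If deleting everything at once is too aggressive, I would start with just the old replication snapshots; the 2021 filter below is only an example, and the -n keeps it a dry run until I remove it:

Code:
# Dry run over the 2021 rep_* snapshots only; drop -n to actually destroy them
zfs list -H -o name -t snapshot | grep 'rep_.*_2021-' | while read -r snap; do
    zfs destroy -nv "$snap"
done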