ansible-playbook 2.9.27
  config file = /etc/ansible/ansible.cfg
  configured module search path = [u'/root/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python2.7/site-packages/ansible
  executable location = /usr/bin/ansible-playbook
  python version = 2.7.5 (default, Nov 14 2023, 16:14:06) [GCC 4.8.5 20150623 (Red Hat 4.8.5-44)]
Using /etc/ansible/ansible.cfg as config file
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_resize.yml *****************************************************
1 plays in /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml

PLAY [Test resize] *************************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:2
Wednesday 30 July 2025 21:27:09 -0400 (0:00:00.377) 0:00:00.377 ********
ok: [managed-node13]
META: ran handlers

TASK [Run the role] ************************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:21
Wednesday 30 July 2025 21:27:14 -0400 (0:00:05.594) 0:00:05.972 ********

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 30 July 2025 21:27:15 -0400 (0:00:00.329) 0:00:06.301 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 30 July 2025 21:27:15 -0400 (0:00:00.494) 0:00:06.795 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 30 July 2025 21:27:15 -0400 (0:00:00.493) 0:00:07.288 ********
skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false,
"item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:27:16 -0400 (0:00:00.760) 0:00:08.049 ******** ok: [managed-node13] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:27:19 -0400 (0:00:02.433) 0:00:10.483 ******** ok: [managed-node13] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:27:19 -0400 (0:00:00.501) 0:00:10.984 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:27:19 -0400 (0:00:00.289) 0:00:11.273 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:27:20 -0400 (0:00:00.279) 0:00:11.552 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:27:21 -0400 (0:00:00.914) 0:00:12.467 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is 
already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed", "libblockdev-2.18-5.el7.x86_64 providing libblockdev is already installed" ] } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:27:26 -0400 (0:00:05.138) 0:00:17.606 ******** ok: [managed-node13] => { "storage_pools | d([])": [] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:27:26 -0400 (0:00:00.666) 0:00:18.272 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:27:27 -0400 (0:00:00.486) 0:00:18.759 ******** ok: [managed-node13] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:27:31 -0400 (0:00:04.069) 0:00:22.829 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Wednesday 30 July 2025 21:27:32 -0400 (0:00:00.923) 0:00:23.753 ******** TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Wednesday 30 July 2025 21:27:32 -0400 (0:00:00.244) 0:00:23.997 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Wednesday 30 July 2025 21:27:32 -0400 (0:00:00.238) 0:00:24.236 ******** TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:27:33 -0400 (0:00:00.210) 0:00:24.446 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:27:34 -0400 (0:00:01.707) 0:00:26.153 ******** ok: [managed-node13] => { "ansible_facts": { "services": { 
"NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": 
"dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" 
}, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "unknown" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": 
{ "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": 
"systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:27:39 -0400 (0:00:04.172) 0:00:30.326 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:27:39 -0400 (0:00:00.589) 0:00:30.915 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:27:39 -0400 (0:00:00.328) 0:00:31.244 ******** ok: [managed-node13] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK 
TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85
Wednesday 30 July 2025 21:27:42 -0400 (0:00:02.063) 0:00:33.308 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92
Wednesday 30 July 2025 21:27:42 -0400 (0:00:00.825) 0:00:34.133 ********
ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925162.0608578, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1753925159.7668476, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925159.7668476, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97
Wednesday 30 July 2025 21:27:44 -0400 (0:00:01.628) 0:00:35.761 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115
Wednesday 30 July 2025 21:27:44 -0400 (0:00:00.405) 0:00:36.167 ********

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121
Wednesday 30 July 2025 21:27:45 -0400 (0:00:00.382) 0:00:36.550 ********
ok: [managed-node13] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } }

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130
Wednesday 30 July 2025 21:27:45 -0400 (0:00:00.399) 0:00:36.950 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134
Wednesday 30 July 2025 21:27:46 -0400 (0:00:00.420) 0:00:37.371 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150
Wednesday 30 July 2025 21:27:46 -0400 (0:00:00.611) 0:00:37.983 ********

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161
Wednesday 30 July 2025 21:27:47 -0400 (0:00:00.490) 0:00:38.473 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166
Wednesday 30 July 2025 21:27:47 -0400 (0:00:00.388) 0:00:38.861 ********

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177
Wednesday 30 July 2025 21:27:47 -0400 (0:00:00.426) 0:00:39.287 ********

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189
Wednesday 30 July 2025 21:27:48 -0400 (0:00:00.355) 0:00:39.643 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197
Wednesday 30 July 2025 21:27:48 -0400 (0:00:00.230) 0:00:39.874 ********
ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202
Wednesday 30 July 2025 21:27:50 -0400 (0:00:01.743) 0:00:41.617 ********

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224
Wednesday 30 July 2025 21:27:50 -0400 (0:00:00.287) 0:00:41.905 ********
ok: [managed-node13]

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:25
Wednesday 30 July 2025 21:27:53 -0400 (0:00:02.444) 0:00:44.349 ********
ok: [managed-node13] => { "ansible_facts": { "storage_skip_checks": [ "blivet_available", "packages_installed", "service_facts" ] }, "changed": false }
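[NOTE]: The storage_skip_checks fact set here is what lets the later role invocations skip the blivet-availability, package, and service-facts probes. A minimal sketch of the corresponding task, assuming set_fact (the fact value matches the log exactly):

  - name: Mark tasks to be skipped
    set_fact:
      storage_skip_checks:
        - blivet_available
        - packages_installed
        - service_facts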
"ansible_facts": { "storage_skip_checks": [ "blivet_available", "packages_installed", "service_facts" ] }, "changed": false } TASK [Get unused disks] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:32 Wednesday 30 July 2025 21:27:53 -0400 (0:00:00.463) 0:00:44.813 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node13 TASK [Ensure test packages] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 Wednesday 30 July 2025 21:27:54 -0400 (0:00:00.679) 0:00:45.492 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } TASK [Find unused disks in the system] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11 Wednesday 30 July 2025 21:27:56 -0400 (0:00:02.102) 0:00:47.595 ******** ok: [managed-node13] => { "changed": false, "disks": [ "sda" ], "info": [ "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "filename [xvda1] is a partition", "Disk [/dev/xvda] attrs [{'fstype': '', 'type': 'disk', 'ssize': '512', 'size': '268435456000'}] has partitions" ] } TASK [Debug why there are no unused disks] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20 Wednesday 30 July 2025 21:28:00 -0400 (0:00:04.118) 0:00:51.713 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29 Wednesday 30 July 2025 21:28:00 -0400 (0:00:00.416) 0:00:52.130 ******** ok: [managed-node13] => { "ansible_facts": { "unused_disks": [ "sda" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34 Wednesday 30 July 2025 21:28:01 -0400 (0:00:00.494) 0:00:52.624 
TASK [Create one LVM logical volume under one volume group with size 5g] *******
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:42
Wednesday 30 July 2025 21:28:02 -0400 (0:00:00.424) 0:00:53.599 ********

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 30 July 2025 21:28:03 -0400 (0:00:00.897) 0:00:54.496 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 30 July 2025 21:28:03 -0400 (0:00:00.695) 0:00:55.191 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 30 July 2025 21:28:04 -0400 (0:00:00.481) 0:00:55.673 ********
skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" }
skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" }
skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 30 July 2025 21:28:05 -0400 (0:00:01.002) 0:00:56.676 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 30 July 2025 21:28:05 -0400 (0:00:00.483) 0:00:57.159 ********
skipping: [managed-node13] => { "changed": false,
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:28:06 -0400 (0:00:00.395) 0:00:57.554 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:28:06 -0400 (0:00:00.484) 0:00:58.038 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:28:07 -0400 (0:00:00.454) 0:00:58.493 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:28:08 -0400 (0:00:00.960) 0:00:59.454 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:28:08 -0400 (0:00:00.377) 0:00:59.832 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:28:09 -0400 (0:00:00.685) 0:01:00.517 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:28:09 -0400 (0:00:00.259) 0:01:00.776 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:28:09 -0400 (0:00:00.463) 0:01:01.240 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:28:10 -0400 (0:00:00.412) 0:01:01.653 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] 
TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 30 July 2025 21:28:09 -0400 (0:00:00.685) 0:01:00.517 ********
ok: [managed-node13] => { "storage_volumes | d([])": [] }

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 30 July 2025 21:28:09 -0400 (0:00:00.259) 0:01:00.776 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32
Wednesday 30 July 2025 21:28:09 -0400 (0:00:00.463) 0:01:01.240 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38
Wednesday 30 July 2025 21:28:10 -0400 (0:00:00.412) 0:01:01.653 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
Wednesday 30 July 2025 21:28:10 -0400 (0:00:00.351) 0:01:02.005 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58
Wednesday 30 July 2025 21:28:11 -0400 (0:00:00.752) 0:01:02.757 ********
ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64
Wednesday 30 July 2025 21:28:11 -0400 (0:00:00.406) 0:01:03.164 ********

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
Wednesday 30 July 2025 21:28:12 -0400 (0:00:00.340) 0:01:03.505 ********
changed: [managed-node13] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false,
"thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:28:19 -0400 (0:00:07.468) 0:01:10.973 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:28:20 -0400 (0:00:00.403) 0:01:11.376 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925162.0608578, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1753925159.7668476, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925159.7668476, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:28:22 -0400 (0:00:02.006) 0:01:13.383 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:28:25 -0400 (0:00:03.573) 0:01:16.957 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:28:26 -0400 (0:00:00.486) 0:01:17.444 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, 
"encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:28:26 -0400 (0:00:00.562) 0:01:18.006 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:28:27 -0400 (0:00:00.444) 0:01:18.451 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:28:27 -0400 (0:00:00.456) 0:01:18.908 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:28:27 -0400 (0:00:00.355) 0:01:19.264 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:28:32 -0400 (0:00:04.675) 0:01:23.939 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:28:36 -0400 (0:00:03.356) 0:01:27.296 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:28:36 -0400 (0:00:00.563) 0:01:27.860 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:28:38 -0400 (0:00:01.971) 0:01:29.831 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 
0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:28:40 -0400 (0:00:01.915) 0:01:31.746 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:28:40 -0400 (0:00:00.443) 0:01:32.190 ******** ok: [managed-node13] TASK [Verify role results] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:59 Wednesday 30 July 2025 21:28:43 -0400 (0:00:02.566) 0:01:34.757 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:28:44 -0400 (0:00:00.883) 0:01:35.640 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] 
******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:28:44 -0400 (0:00:00.548) 0:01:36.189 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:28:45 -0400 (0:00:00.465) 0:01:36.654 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "8238077b-eb6b-41e5-b2ee-e708a84ef837" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "751BWi-d6YA-IKpf-c9J0-drej-CD9q-jHrN2V" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:28:48 -0400 (0:00:03.599) 0:01:40.253 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002581", "end": "2025-07-30 21:28:52.415744", "rc": 0, "start": "2025-07-30 21:28:52.413163" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs 
defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:28:52 -0400 (0:00:03.984) 0:01:44.238 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002519", "end": "2025-07-30 21:28:54.357400", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:28:54.354881" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:28:55 -0400 (0:00:02.079) 0:01:46.318 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:28:56 -0400 (0:00:01.016) 0:01:47.334 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:28:56 -0400 (0:00:00.297) 0:01:47.631 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.017984", "end": "2025-07-30 21:28:57.800076", "rc": 0, "start": "2025-07-30 21:28:57.782092" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:28:58 -0400 (0:00:01.967) 0:01:49.599 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:28:58 -0400 (0:00:00.512) 0:01:50.111 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:28:59 -0400 (0:00:00.978) 0:01:51.089 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" 
] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:29:00 -0400 (0:00:00.553) 0:01:51.643 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:29:03 -0400 (0:00:02.725) 0:01:54.369 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:29:03 -0400 (0:00:00.482) 0:01:54.851 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:29:03 -0400 (0:00:00.397) 0:01:55.248 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:29:04 -0400 (0:00:00.449) 0:01:55.698 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:29:04 -0400 (0:00:00.596) 0:01:56.294 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:29:05 -0400 (0:00:00.649) 0:01:56.944 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:29:05 -0400 (0:00:00.326) 0:01:57.271 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:29:06 -0400 (0:00:00.782) 0:01:58.054 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:29:08 -0400 (0:00:01.955) 0:02:00.010 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:29:09 -0400 (0:00:00.500) 0:02:00.510 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:29:10 -0400 (0:00:01.180) 0:02:01.690 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:29:10 -0400 (0:00:00.350) 0:02:02.040 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:29:11 -0400 (0:00:00.418) 0:02:02.458 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:29:11 -0400 (0:00:00.387) 0:02:02.846 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:29:11 -0400 (0:00:00.391) 0:02:03.238 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:29:12 -0400 (0:00:00.424) 0:02:03.662 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:29:12 -0400 (0:00:00.343) 0:02:04.005 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
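
The rc of 1 on the blivet grow-to-fill probe above is not a failure: failed_when_result is false, so the non-zero rc merely records that this blivet version lacks the capability, which is why the per-PV fill check that follows is skipped. The general probing idiom looks like this (a sketch of the pattern; probe_cmd is a placeholder, not the test's actual script):

    - name: Probe for an optional capability
      command: "{{ probe_cmd }}"   # placeholder; the test runs its own probe script
      register: probe
      changed_when: false
      failed_when: false           # tolerate rc != 0; only record the outcome

    - name: Run the dependent check only when the capability is present
      debug:
        msg: capability is available
      when: probe.rc == 0
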
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:29:13 -0400 (0:00:00.442) 0:02:04.448 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:29:13 -0400 (0:00:00.460) 0:02:04.908 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:29:13 -0400 (0:00:00.370) 0:02:05.279 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:29:14 -0400 (0:00:00.415) 0:02:05.694 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:29:14 -0400 (0:00:00.301) 0:02:05.996 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:29:15 -0400 (0:00:00.841) 0:02:06.837 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:29:16 -0400 (0:00:00.980) 0:02:07.817 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:29:16 -0400 (0:00:00.306) 0:02:08.124 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:29:17 -0400 (0:00:00.438) 0:02:08.562 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:29:17 -0400 (0:00:00.566) 0:02:09.128 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:29:18 -0400 (0:00:00.676) 0:02:09.805 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:29:18 -0400 (0:00:00.324) 0:02:10.130 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:29:19 -0400 (0:00:00.545) 0:02:10.675 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:29:19 -0400 (0:00:00.461) 0:02:11.137 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:29:20 -0400 (0:00:00.604) 0:02:11.741 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:29:21 -0400 (0:00:01.008) 0:02:12.749 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:29:21 -0400 (0:00:00.472) 0:02:13.221 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:29:22 -0400 (0:00:00.422) 0:02:13.644 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
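
Every thinpool check above is skipped because test1 is a plain LV (thin: false in the pool data earlier in this log). To exercise this branch, the volume would request thin provisioning, roughly as follows (a sketch built from the thin_* fields visible in this log; the thinpool name, sizes, and layout are illustrative):

    storage_pools:
      - name: foo
        disks: [sda]
        volumes:
          - name: test1
            size: 4g                 # illustrative; must fit in the thinpool
            fs_type: ext4
            mount_point: /opt/test1
            thin: true
            thin_pool_name: tpool1   # illustrative name
            thin_pool_size: 8g       # illustrative size
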
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:29:22 -0400 (0:00:00.354) 0:02:13.999 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:29:23 -0400 (0:00:00.505) 0:02:14.505 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:29:24 -0400 (0:00:01.115) 0:02:15.620 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:29:24 -0400 (0:00:00.539) 0:02:16.159 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:29:25 -0400 (0:00:00.538) 0:02:16.698 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:29:26 -0400 (0:00:00.865) 0:02:17.564 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:29:26 -0400 (0:00:00.448) 0:02:18.013 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:29:27 -0400 (0:00:00.565) 0:02:18.578 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:29:27 -0400 (0:00:00.550) 0:02:19.128 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:29:28 -0400 (0:00:00.479) 0:02:19.608 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:29:28 -0400 (0:00:00.330) 0:02:19.938 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:29:29 -0400 (0:00:00.617) 0:02:20.556 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:29:29 -0400 (0:00:00.503) 0:02:21.059 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:29:30 -0400 (0:00:01.032) 0:02:22.092 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:29:31 -0400 (0:00:01.125) 0:02:23.218 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:29:32 -0400 (0:00:00.437) 0:02:23.655 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:29:32 -0400 (0:00:00.512) 0:02:24.168 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:29:33 -0400 (0:00:00.518) 0:02:24.687 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:29:33 -0400 (0:00:00.455) 0:02:25.142 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:29:34 -0400 (0:00:00.357) 0:02:25.499 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:29:34 -0400 (0:00:00.313) 0:02:25.813 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:29:34 -0400 (0:00:00.440) 0:02:26.253 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:29:36 -0400 (0:00:01.528) 0:02:27.782 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:29:37 -0400 (0:00:00.550) 0:02:28.332 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:29:37 -0400 (0:00:00.436) 0:02:28.769 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:29:38 -0400 (0:00:00.720) 0:02:29.490 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:29:38 -0400 (0:00:00.354) 0:02:29.844 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:29:39 -0400 
(0:00:00.493) 0:02:30.338 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:29:39 -0400 (0:00:00.549) 0:02:30.887 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:29:40 -0400 (0:00:00.567) 0:02:31.454 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:29:41 -0400 (0:00:00.895) 0:02:32.350 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:29:42 -0400 (0:00:00.967) 0:02:33.317 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:29:42 -0400 (0:00:00.513) 0:02:33.830 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:29:44 -0400 (0:00:02.267) 
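
The eight include lines above fan the per-volume verification out over _storage_volume_tests; the driving task amounts to an include_tasks loop along these lines (a reconstruction from the task name and variable list seen in this log, not verified against the test source):

    - name: Run test verify for storage_test_volume_subset
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"   # mount, fstab, fs, device, ...
      loop_control:
        loop_var: storage_test_volume_subset
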
0:02:36.098 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:29:45 -0400 (0:00:00.568) 0:02:36.666 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:29:45 -0400 (0:00:00.477) 0:02:37.143 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:29:46 -0400 (0:00:00.612) 0:02:37.756 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:29:47 -0400 (0:00:00.639) 0:02:38.396 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:29:47 -0400 (0:00:00.591) 0:02:38.987 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:29:48 -0400 (0:00:00.504) 0:02:39.492 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:29:48 -0400 (0:00:00.664) 0:02:40.156 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:29:49 -0400 (0:00:00.465) 0:02:40.622 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:29:49 -0400 (0:00:00.439) 0:02:41.061 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:29:50 -0400 (0:00:00.351) 0:02:41.413 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:29:50 -0400 (0:00:00.513) 0:02:41.926 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:29:51 -0400 (0:00:00.578) 0:02:42.587 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:29:51 -0400 (0:00:00.664) 0:02:43.251 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:29:52 -0400 (0:00:00.644) 0:02:43.895 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:29:52 -0400 (0:00:00.379) 0:02:44.275 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:29:53 -0400 (0:00:00.569) 0:02:44.845 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
21:29:53 -0400 (0:00:00.435) 0:02:45.281 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:29:54 -0400 (0:00:00.619) 0:02:45.900 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:29:55 -0400 (0:00:00.558) 0:02:46.459 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925299.0984519, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925299.0984519, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 178219, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925299.0984519, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:29:57 -0400 (0:00:01.930) 0:02:48.389 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:29:57 -0400 (0:00:00.497) 0:02:48.887 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:29:58 -0400 (0:00:00.460) 0:02:49.347 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:29:58 -0400 (0:00:00.520) 0:02:49.868 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:29:59 -0400 (0:00:00.437) 0:02:50.306 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
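
The device verification above is a stat-then-assert pattern; stripped of the role's variables it reduces to roughly this (a sketch, with the device path taken from this log):

    - name: Stat the volume's device node
      stat:
        path: /dev/mapper/foo-test1
        follow: yes    # resolve the /dev/mapper symlink to the dm device
      register: st

    - name: Assert the node exists and is a block device
      assert:
        that:
          - st.stat.exists
          - st.stat.isblk
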
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:29:59 -0400 (0:00:00.523) 0:02:50.829 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:30:00 -0400 (0:00:00.605) 0:02:51.435 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:30:00 -0400 (0:00:00.464) 0:02:51.900 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:30:02 -0400 (0:00:02.329) 0:02:54.229 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:30:03 -0400 (0:00:00.398) 0:02:54.627 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:30:03 -0400 (0:00:00.484) 0:02:55.112 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:30:04 -0400 (0:00:01.011) 0:02:56.124 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:30:05 -0400 (0:00:00.466) 0:02:56.591 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:30:05 -0400 (0:00:00.499) 0:02:57.090 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:30:06 -0400 (0:00:00.447) 0:02:57.538 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:30:06 -0400 (0:00:00.400) 0:02:57.938 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:30:07 -0400 (0:00:00.621) 0:02:58.560 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:30:07 -0400 (0:00:00.465) 0:02:59.025 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:30:08 -0400 (0:00:00.625) 0:02:59.650 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:30:08 -0400 (0:00:00.634) 0:03:00.285 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:30:09 -0400 (0:00:00.489) 0:03:00.775 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:30:09 -0400 (0:00:00.476) 0:03:01.251 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:30:10 -0400 (0:00:00.380) 0:03:01.631 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:30:10 -0400 (0:00:00.333) 0:03:01.964 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:30:11 -0400 (0:00:00.378) 0:03:02.343 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:30:11 -0400 (0:00:00.382) 0:03:02.725 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:30:11 -0400 (0:00:00.365) 0:03:03.091 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:30:12 -0400 (0:00:00.395) 0:03:03.487 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:30:12 -0400 (0:00:00.424) 0:03:03.911 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:30:12 -0400 (0:00:00.343) 0:03:04.255 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:30:13 -0400 (0:00:00.453) 0:03:04.709 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:30:13 -0400 (0:00:00.222) 0:03:04.932 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:30:14 -0400 (0:00:00.388) 0:03:05.320 ******** ok: 
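[managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" }

Note: the parsed result above expresses one quantity three ways: 5368709120 bytes = 5 * 2^30 bytes = "5 GiB" in binary units, written "5g" in LVM notation and "5GiB" in parted notation. The same conversion can be reproduced with Ansible's standard human_to_bytes filter; the task below is an editorial sketch, not part of this test:

    - name: Convert a human-readable size to bytes
      debug:
        msg: "{{ '5 GiB' | human_to_bytes }}"  # expected output: 5368709120
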
TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:30:16 -0400 (0:00:02.688) 0:03:08.008 ******** ok: [managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:30:18 -0400 (0:00:01.827) 0:03:09.836 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:30:19 -0400 (0:00:00.637) 0:03:10.473 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:30:19 -0400 (0:00:00.470) 0:03:10.944 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:30:21 -0400 (0:00:01.903) 0:03:12.848 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:30:21 -0400 (0:00:00.337) 0:03:13.185 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:30:22 -0400 (0:00:00.266) 0:03:13.452 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:30:22 -0400 (0:00:00.430) 0:03:13.883 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:30:22 -0400 (0:00:00.384) 0:03:14.267 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:30:23 -0400
(0:00:00.442) 0:03:14.709 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:30:23 -0400 (0:00:00.486) 0:03:15.195 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:30:24 -0400 (0:00:00.579) 0:03:15.775 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:30:24 -0400 (0:00:00.470) 0:03:16.246 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:30:25 -0400 (0:00:00.474) 0:03:16.720 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:30:25 -0400 (0:00:00.486) 0:03:17.207 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:30:26 -0400 (0:00:00.308) 0:03:17.516 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:30:26 -0400 (0:00:00.371) 0:03:17.888 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:30:27 -0400 (0:00:00.438) 0:03:18.327 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:30:27 -0400 (0:00:00.345) 0:03:18.672 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:30:27 -0400 (0:00:00.311) 0:03:18.983 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:30:28 -0400 (0:00:00.431) 0:03:19.415 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:30:28 -0400 (0:00:00.482) 0:03:19.897 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:30:28 -0400 (0:00:00.333) 0:03:20.231 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:30:29 -0400 (0:00:00.317) 0:03:20.549 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:30:29 -0400 (0:00:00.345) 0:03:20.894 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:30:29 -0400 (0:00:00.314) 0:03:21.209 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:30:30 -0400 (0:00:00.292) 0:03:21.502 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.020018", "end": "2025-07-30 21:30:31.788156", "rc": 0, "start": "2025-07-30 21:30:31.768138" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:30:32 -0400 (0:00:01.935) 0:03:23.437 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] 
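****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:30:32 -0400 (0:00:00.320) 0:03:23.758 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed

Note: the lvs query above uses --nameprefixes to emit shell-style LVM2_* name/value pairs; the test folds LVM2_SEGTYPE=linear into the storage_test_lv_segtype fact and then asserts on it. A standalone check of the same property might look like the sketch below (the lvs field name segtype is real; the task wording and flow are illustrative, not the test's own code):

    - name: Get LV segment type
      command: lvs --noheadings -o segtype foo/test1
      register: segtype_out
      changed_when: false

    - name: Assert the LV is linear (not cached)
      assert:
        that: segtype_out.stdout | trim == 'linear'
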
TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:30:32 -0400 (0:00:00.491) 0:03:24.249 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:30:33 -0400 (0:00:00.463) 0:03:24.712 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:30:33 -0400 (0:00:00.384) 0:03:25.097 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:30:34 -0400 (0:00:00.431) 0:03:25.528 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:30:34 -0400 (0:00:00.475) 0:03:26.003 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:30:35 -0400 (0:00:00.411) 0:03:26.415 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:30:35 -0400 (0:00:00.347) 0:03:26.762 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change volume_size to 9g] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:62 Wednesday 30 July 2025 21:30:35 -0400 (0:00:00.384) 0:03:27.147 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:30:37 -0400 (0:00:01.355) 0:03:28.503 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for
managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:30:37 -0400 (0:00:00.541) 0:03:29.044 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:30:38 -0400 (0:00:00.364) 0:03:29.408 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:30:38 -0400 (0:00:00.808) 0:03:30.217 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:30:39 -0400 (0:00:00.401) 0:03:30.618 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:30:39 -0400 (0:00:00.317) 0:03:30.936 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:30:40 -0400 (0:00:00.523) 0:03:31.460 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:30:40 -0400 (0:00:00.368) 0:03:31.828 ******** included: 
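/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13

Note: this second pass through the role is driven by the test's "Change volume_size to 9g" step. A minimal sketch of the kind of invocation that would produce the storage_pools value shown just below; the include_role form is an assumption, while the disk, pool, and volume values are taken from the log:

    - name: Resize test1 to 9g
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks: [sda]
            volumes:
              - name: test1
                size: 9g
                fs_type: ext4
                mount_point: /opt/test1
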
TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:30:41 -0400 (0:00:00.933) 0:03:32.761 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:30:41 -0400 (0:00:00.379) 0:03:33.141 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "9g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:30:42 -0400 (0:00:00.588) 0:03:33.730 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:30:42 -0400 (0:00:00.418) 0:03:34.148 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:30:43 -0400 (0:00:00.434) 0:03:34.583 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:30:43 -0400 (0:00:00.395) 0:03:34.978 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:30:44 -0400 (0:00:00.366) 0:03:35.344 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:30:44 -0400 (0:00:00.417) 0:03:35.762 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:30:45 -0400 (0:00:00.545) 0:03:36.307 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:30:45 -0400 (0:00:00.379) 0:03:36.687 ******** changed: [managed-node13] => { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:30:51 -0400 (0:00:06.231) 0:03:42.919 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:30:51 -0400 (0:00:00.355) 0:03:43.274 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925315.527523, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6b95df90798c1adba66c669f2bd76d2d16d86df5", "ctime": 1753925315.524523, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, 
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925315.524523, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:30:53 -0400 (0:00:01.816) 0:03:45.090 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:30:55 -0400 (0:00:01.618) 0:03:46.709 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:30:55 -0400 (0:00:00.260) 0:03:46.969 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": 
null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:30:55 -0400 (0:00:00.322) 0:03:47.291 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:30:56 -0400 (0:00:00.479) 0:03:47.771 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:30:56 -0400 (0:00:00.377) 0:03:48.148 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:30:57 -0400 (0:00:00.482) 0:03:48.631 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:30:59 -0400 (0:00:02.002) 0:03:50.633 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': 
u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:31:01 -0400 (0:00:01.750) 0:03:52.384 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:31:01 -0400 (0:00:00.317) 0:03:52.702 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:31:03 -0400 (0:00:02.222) 0:03:54.924 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:31:05 -0400 (0:00:01.579) 0:03:56.504 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:31:05 -0400 (0:00:00.342) 0:03:56.846 ******** ok: [managed-node13] TASK [Verify role results - 2] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:76 Wednesday 30 July 2025 
21:31:09 -0400 (0:00:03.490) 0:04:00.337 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:31:09 -0400 (0:00:00.745) 0:04:01.082 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:31:10 -0400 (0:00:00.430) 0:04:01.512 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:31:10 -0400 (0:00:00.422) 0:04:01.935 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "9G", "type": "lvm", "uuid": "8238077b-eb6b-41e5-b2ee-e708a84ef837" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "751BWi-d6YA-IKpf-c9J0-drej-CD9q-jHrN2V" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:31:12 -0400 (0:00:01.789) 0:04:03.725 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002888", "end": "2025-07-30 21:31:13.477637", "rc": 0, "start": "2025-07-30 21:31:13.474749" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:31:13 -0400 (0:00:01.418) 0:04:05.143 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002545", "end": "2025-07-30 21:31:15.107192", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:31:15.104647" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:31:15 -0400 (0:00:01.623) 0:04:06.767 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:31:16 -0400 (0:00:00.789) 0:04:07.557 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:31:16 -0400 (0:00:00.353) 0:04:07.911 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.017827", "end": "2025-07-30 21:31:17.770071", "rc": 0, "start": "2025-07-30 21:31:17.752244" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:31:18 -0400 (0:00:01.522) 0:04:09.433 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:31:18 -0400 (0:00:00.425) 0:04:09.859 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:31:19 -0400 (0:00:00.788) 0:04:10.647 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:31:19 -0400 (0:00:00.501) 0:04:11.149 ******** ok: [managed-node13] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:31:21 -0400 (0:00:01.543) 0:04:12.692 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:31:21 -0400 (0:00:00.433) 0:04:13.126 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:31:22 -0400 (0:00:00.488) 0:04:13.614 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:31:22 -0400 (0:00:00.522) 0:04:14.136 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:31:23 -0400 (0:00:00.508) 0:04:14.645 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:31:23 -0400 (0:00:00.489) 0:04:15.134 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:31:24 -0400 (0:00:00.410) 0:04:15.544 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:31:24 -0400 (0:00:00.548) 0:04:16.093 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:31:26 -0400 (0:00:01.646) 0:04:17.740 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:31:26 -0400 (0:00:00.405) 0:04:18.146 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:31:27 -0400 (0:00:00.846) 0:04:18.992 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:31:28 -0400 (0:00:00.412) 0:04:19.405 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:31:28 -0400 (0:00:00.472) 0:04:19.877 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:31:28 -0400 (0:00:00.336) 0:04:20.213 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:31:29 -0400 (0:00:00.361) 0:04:20.575 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:31:29 -0400 (0:00:00.363) 0:04:20.938 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:31:30 -0400 (0:00:00.716) 0:04:21.655 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:31:30 -0400 (0:00:00.390) 0:04:22.045 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:31:31 -0400 (0:00:00.275) 0:04:22.321 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:31:31 -0400 (0:00:00.361) 0:04:22.683 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:31:31 -0400 (0:00:00.294) 0:04:22.978 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:31:32 -0400 (0:00:00.441) 0:04:23.419 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:31:32 -0400 (0:00:00.811) 0:04:24.231 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:31:33 -0400 (0:00:00.811) 0:04:25.042 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:31:33 -0400 (0:00:00.243) 0:04:25.286 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:31:34 -0400 (0:00:00.428) 0:04:25.714 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:31:34 -0400 (0:00:00.418) 0:04:26.133 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:31:35 -0400 (0:00:00.467) 0:04:26.600 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:31:35 -0400 (0:00:00.366) 0:04:26.967 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:31:36 -0400 (0:00:00.366) 0:04:27.334 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:31:36 -0400 (0:00:00.317) 0:04:27.651 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:31:37 -0400 (0:00:00.949) 0:04:28.601 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:31:37 -0400 (0:00:00.677) 0:04:29.279 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:31:38 -0400 (0:00:00.354) 0:04:29.634 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:31:38 -0400 (0:00:00.510) 0:04:30.144 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:31:39 -0400 (0:00:00.433) 0:04:30.577 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:31:39 -0400 (0:00:00.433) 0:04:31.011 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:31:40 -0400 (0:00:00.785) 0:04:31.797 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:31:41 -0400 (0:00:00.635) 0:04:32.432 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:31:41 -0400 (0:00:00.343) 0:04:32.775 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:31:42 -0400 (0:00:00.607) 0:04:33.382 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:31:42 -0400 (0:00:00.496) 0:04:33.879 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:31:43 -0400 (0:00:00.603) 0:04:34.482 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:31:43 -0400 (0:00:00.258) 0:04:34.741 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:31:43 -0400 (0:00:00.294) 0:04:35.035 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:31:44 -0400 (0:00:00.366) 0:04:35.402 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:31:44 -0400 (0:00:00.358) 0:04:35.760 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:31:44 -0400 (0:00:00.531) 0:04:36.292 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:31:45 -0400 (0:00:00.794) 0:04:37.087 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:31:46 -0400 (0:00:00.913) 0:04:38.000 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:31:47 -0400 (0:00:00.368) 0:04:38.368 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:31:47 -0400 (0:00:00.298) 0:04:38.667 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:31:47 -0400 (0:00:00.349) 0:04:39.016 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:31:48 -0400 (0:00:00.393) 0:04:39.409 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:31:48 -0400 (0:00:00.476) 0:04:39.886 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:31:48 -0400 (0:00:00.327) 0:04:40.214 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:31:49 -0400 (0:00:00.250) 0:04:40.464 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:31:50 -0400 (0:00:01.109) 0:04:41.574 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:31:50 -0400 (0:00:00.519) 0:04:42.093 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:31:51 -0400 (0:00:00.598) 0:04:42.692 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pool was created] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:31:51 -0400 (0:00:00.346) 0:04:43.038 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:31:52 -0400 (0:00:00.351) 0:04:43.390 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:31:52 -0400
(0:00:00.414) 0:04:43.804 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:31:52 -0400 (0:00:00.381) 0:04:44.186 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:31:53 -0400 (0:00:00.408) 0:04:44.595 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:31:53 -0400 (0:00:00.387) 0:04:44.982 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:31:54 -0400 (0:00:00.705) 0:04:45.688 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:31:55 -0400 (0:00:00.658) 0:04:46.346 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13
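
Note: the eight "included:" lines above all come from a single dispatch task looping over the _storage_volume_tests list set two tasks earlier, mapping each entry to a test-verify-volume-<subset>.yml file. A plausible reconstruction of that dispatcher, with the loop-variable name assumed rather than taken from the test source:

  - name: Run test verify for storage_test_volume_subset
    include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
    loop: "{{ _storage_volume_tests }}"
    loop_control:
      loop_var: storage_test_volume_subset
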
TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:31:57 -0400 (0:00:01.979) 0:04:48.326 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:31:57 -0400 (0:00:00.677) 0:04:49.004 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:31:58 -0400 (0:00:00.604) 0:04:49.609 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:31:58 -0400 (0:00:00.451) 0:04:50.060 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:31:59 -0400 (0:00:00.375) 0:04:50.436 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:31:59 -0400 (0:00:00.425) 0:04:50.862 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:32:00 -0400 (0:00:00.491) 0:04:51.354 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:32:00 -0400 (0:00:00.295) 0:04:51.650 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:32:00 -0400 (0:00:00.416) 0:04:52.092 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:32:01 -0400 (0:00:00.292) 0:04:52.385 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:32:01 -0400 (0:00:00.469) 0:04:52.854 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:32:01 -0400 (0:00:00.335) 0:04:53.190 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:32:02 -0400 (0:00:00.784) 0:04:53.974 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:32:03 -0400 (0:00:00.430) 0:04:54.405 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:32:03 -0400 (0:00:00.525) 0:04:54.930 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:32:04 -0400 (0:00:00.391) 0:04:55.322 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:32:04 -0400 (0:00:00.540) 0:04:55.862 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false }
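
Note: the fstab facts set above pair each expected count ("1") with a list of regex matches against /etc/fstab, and the assertions that follow simply compare list lengths. A minimal sketch of the same idea with assumed fact names (the test derives its lists from facts gathered earlier, not with these exact tasks):

  - name: Read /etc/fstab from the managed host
    command: cat /etc/fstab
    register: storage_test_fstab
    changed_when: false

  - name: Collect fstab lines that begin with the volume's device path
    set_fact:
      storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall('^/dev/mapper/foo-test1 ', multiline=True) }}"

  - name: Verify that the device identifier appears in /etc/fstab
    assert:
      that: storage_test_fstab_id_matches | length == 1
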
TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 21:32:04 -0400 (0:00:00.315) 0:04:56.177 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:32:05 -0400 (0:00:00.580) 0:04:56.758 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:32:06 -0400 (0:00:00.572) 0:04:57.330 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925451.1531112, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925451.1531112, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 178219, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925451.1531112, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:32:07 -0400 (0:00:01.603) 0:04:58.934 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:32:08 -0400 (0:00:00.485) 0:04:59.419 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:32:08 -0400 (0:00:00.410) 0:04:59.829 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:32:08 -0400 (0:00:00.428) 0:05:00.258 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:32:09 -0400 (0:00:00.357) 0:05:00.615 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:32:09 -0400 (0:00:00.507) 0:05:01.123 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:32:10 -0400 (0:00:00.399) 0:05:01.522 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:32:10 -0400 (0:00:00.301) 0:05:01.824 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:32:12 -0400 (0:00:01.873) 0:05:03.697 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:32:12 -0400 (0:00:00.396) 0:05:04.094 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:32:13 -0400 (0:00:00.426) 0:05:04.520 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:32:14 -0400 (0:00:00.964) 0:05:05.485 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:32:14 -0400 (0:00:00.358) 0:05:05.843 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:32:14 -0400 (0:00:00.306) 0:05:06.150 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:32:15 -0400 (0:00:00.312) 0:05:06.462 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:32:15 -0400 (0:00:00.337) 0:05:06.800 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:32:15 -0400 (0:00:00.363) 0:05:07.163 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:32:16 -0400 (0:00:00.294) 0:05:07.458 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:32:16 -0400 (0:00:00.378) 0:05:07.836 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:32:17 -0400 (0:00:00.519) 0:05:08.356 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:32:17 -0400 (0:00:00.481) 0:05:08.838 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:32:17 -0400 (0:00:00.325) 0:05:09.163 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:32:18 -0400 (0:00:00.299) 0:05:09.463 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:32:18 -0400 (0:00:00.483) 0:05:09.946 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:32:18 -0400 (0:00:00.195) 0:05:10.142 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:32:19 -0400 (0:00:00.284) 0:05:10.426 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:32:19 -0400 (0:00:00.273) 0:05:10.700 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:32:19 -0400 (0:00:00.245) 0:05:10.945 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:32:19 -0400 (0:00:00.193) 0:05:11.139 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:32:20 -0400 (0:00:00.363) 0:05:11.503 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:32:20 -0400 (0:00:00.575) 0:05:12.078 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:32:21 -0400 (0:00:00.345) 0:05:12.423 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
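
Note: the size tasks that follow convert between human-readable sizes and exact byte counts in both directions; the "9g"/"9 GiB" volume reported next is 9 x 1024^3 = 9663676416 bytes, and the 10g pool is 10737418240 bytes. An illustrative sanity check using Ansible's human_to_bytes filter (this task is not part of the test itself):

  - name: Sanity-check the GiB-to-bytes arithmetic used by the size comparison
    assert:
      that:
        - "('9g' | human_to_bytes) == 9663676416"    # 9 * 1024 * 1024 * 1024
        - "('10g' | human_to_bytes) == 10737418240"  # 10 * 1024 * 1024 * 1024
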
[managed-node13] => { "bytes": 9663676416, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:32:23 -0400 (0:00:01.772) 0:05:14.589 ******** ok: [managed-node13] => { "bytes": 9663676416, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:32:24 -0400 (0:00:01.491) 0:05:16.080 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "9663676416" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:32:25 -0400 (0:00:00.466) 0:05:16.547 ******** ok: [managed-node13] => { "storage_test_expected_size": "9663676416" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:32:25 -0400 (0:00:00.373) 0:05:16.921 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:32:27 -0400 (0:00:01.907) 0:05:18.828 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:32:28 -0400 (0:00:00.473) 0:05:19.301 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:32:28 -0400 (0:00:00.444) 0:05:19.745 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:32:28 -0400 (0:00:00.490) 0:05:20.236 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:32:29 -0400 (0:00:00.483) 0:05:20.720 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:32:29 -0400 
(0:00:00.372) 0:05:21.093 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:32:30 -0400 (0:00:00.478) 0:05:21.571 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:32:30 -0400 (0:00:00.418) 0:05:21.990 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:32:31 -0400 (0:00:00.418) 0:05:22.409 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:32:31 -0400 (0:00:00.644) 0:05:23.054 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:32:32 -0400 (0:00:00.352) 0:05:23.406 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:32:32 -0400 (0:00:00.380) 0:05:23.787 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:32:32 -0400 (0:00:00.325) 0:05:24.112 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:32:33 -0400 (0:00:00.469) 0:05:24.582 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:32:33 -0400 (0:00:00.321) 0:05:24.903 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:32:33 -0400 (0:00:00.327) 0:05:25.231 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:32:34 -0400 (0:00:00.445) 0:05:25.677 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:32:34 -0400 (0:00:00.355) 0:05:26.032 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:32:35 -0400 (0:00:00.399) 0:05:26.432 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:32:35 -0400 (0:00:00.590) 0:05:27.023 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 9663676416, "changed": false, "failed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:32:36 -0400 (0:00:00.466) 0:05:27.489 ******** ok: [managed-node13] => { "storage_test_expected_size": "9663676416" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:32:36 -0400 (0:00:00.410) 0:05:27.899 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:32:37 -0400 (0:00:00.398) 0:05:28.298 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.020192", "end": "2025-07-30 21:32:38.280357", "rc": 0, "start": "2025-07-30 21:32:38.260165" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:32:38 -0400 (0:00:01.722) 0:05:30.021 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false }
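
Note on the lvs call above: with --noheadings --nameprefixes --unquoted --nosuffix --units=b, lvs prints one line of LVM2_KEY=VALUE pairs (the STDOUT shown), which is easy to split into facts; LVM2_SEGTYPE=linear feeds the segment-type assertion that follows, and the empty LVM2_CACHE_TOTAL_BLOCKS= confirms no cache volume is attached. An illustrative parse of that output into a dictionary, with fact names assumed:

  - name: Query the LV in machine-readable form
    command: >-
      lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
      -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
    register: storage_test_lvs_out
    changed_when: false

  - name: Split the LVM2_KEY=VALUE pairs into a dictionary
    set_fact:
      storage_test_lv_info: "{{ dict(storage_test_lvs_out.stdout | regex_findall('(LVM2_[A-Z_]+)=(\\S*)')) }}"

  - name: Check segment type
    assert:
      that: storage_test_lv_info.LVM2_SEGTYPE == 'linear'
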
TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:32:39 -0400 (0:00:00.452) 0:05:30.474 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:32:39 -0400 (0:00:00.567) 0:05:31.041 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:32:40 -0400 (0:00:00.500) 0:05:31.542 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:32:40 -0400 (0:00:00.392) 0:05:31.935 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:32:40 -0400 (0:00:00.336) 0:05:32.271 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:32:41 -0400 (0:00:00.410) 0:05:32.682 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:32:41 -0400 (0:00:00.452) 0:05:33.135 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:32:42 -0400 (0:00:00.444) 0:05:33.579 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change volume size to 5g] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:79 Wednesday 30 July 2025 21:32:42 -0400 (0:00:00.445) 0:05:34.025 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:32:43 -0400 (0:00:01.103) 0:05:35.128 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for
managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:32:44 -0400 (0:00:00.565) 0:05:35.694 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:32:45 -0400 (0:00:00.697) 0:05:36.392 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:32:46 -0400 (0:00:00.954) 0:05:37.346 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:32:46 -0400 (0:00:00.399) 0:05:37.746 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:32:46 -0400 (0:00:00.232) 0:05:37.978 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:32:46 -0400 (0:00:00.316) 0:05:38.295 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:32:47 -0400 (0:00:00.477) 0:05:38.772 ******** included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:32:48 -0400 (0:00:00.888) 0:05:39.661 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:32:48 -0400 (0:00:00.342) 0:05:40.004 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] }
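For context, the storage_pools value echoed above is what the test hands to the role for this step. A minimal sketch of such an invocation (assuming the include_role pattern these test playbooks typically use; the authoritative version is the "Change volume size to 5g" task in tests_resize.yml) looks like:

    # Sketch only; field names match the role's storage_pools schema as shown in the log above.
    - name: Change volume size to 5g
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            volumes:
              - name: test1
                size: "5g"
                fs_type: ext4
                mount_point: /opt/test1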
TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:32:49 -0400 (0:00:00.671) 0:05:40.676 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:32:49 -0400 (0:00:00.387) 0:05:41.063 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:32:50 -0400 (0:00:00.349) 0:05:41.413 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:32:50 -0400 (0:00:00.456) 0:05:41.870 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:32:51 -0400 (0:00:00.514) 0:05:42.385 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:32:51 -0400 (0:00:00.627) 0:05:43.012 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:32:52 -0400 (0:00:00.544) 0:05:43.556 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:32:52 -0400 (0:00:00.305) 0:05:43.861 ******** changed: [managed-node13] => { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" }, { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] }
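Note the ordering of the two actions above: the ext4 filesystem ("resize format") is resized before the logical volume ("resize device"), which is the order blivet uses when shrinking. The role performs this through the blivet Python API rather than shell commands, but a hypothetical hand-rolled equivalent of the same shrink to 5g would be roughly:

    # Hypothetical equivalent of the two blivet actions; not what the role actually runs.
    - name: Shrink the ext4 filesystem first ("resize format")
      command: resize2fs /dev/mapper/foo-test1 5G

    - name: Then shrink the logical volume to match ("resize device")
      command: lvreduce --force --size 5G foo/test1

Growing reverses the order: the device is extended first (lvextend) and the filesystem is grown into it afterwards (resize2fs).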
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925315.524523, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:33:00 -0400 (0:00:01.442) 0:05:51.817 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:33:02 -0400 (0:00:01.570) 0:05:53.388 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:33:02 -0400 (0:00:00.413) 0:05:53.801 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" }, { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": 
null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:33:03 -0400 (0:00:00.608) 0:05:54.409 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:33:03 -0400 (0:00:00.484) 0:05:54.894 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:33:03 -0400 (0:00:00.396) 0:05:55.290 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:33:04 -0400 (0:00:00.513) 0:05:55.804 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:33:06 -0400 (0:00:01.727) 0:05:57.531 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': 
u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" }
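Each entry in the "mounts" list returned by blivet is applied as one loop item like the one above. A standalone task doing roughly the same thing with the stock Ansible 2.9 mount module (a sketch, not the role's actual loop) would be:

    # Sketch of the equivalent standalone task; the role loops over blivet's "mounts" list instead.
    - name: Mount the resized volume and record it in /etc/fstab
      mount:
        path: /opt/test1
        src: /dev/mapper/foo-test1
        fstype: ext4
        opts: defaults
        state: mounted

With state: mounted the module both mounts the path and writes the /etc/fstab entry, which matches the "state": "mounted" in the loop item.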
TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:33:08 -0400 (0:00:01.978) 0:05:59.509 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:33:08 -0400 (0:00:00.504) 0:06:00.014 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:33:10 -0400 (0:00:01.825) 0:06:01.839 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:33:12 -0400 (0:00:01.504) 0:06:03.344 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:33:12 -0400 (0:00:00.404) 0:06:03.748 ******** ok: [managed-node13] TASK [Verify role results - 3] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:93 Wednesday 30 July 2025 21:33:16 -0400 (0:00:03.553) 0:06:07.301 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:33:16 -0400 (0:00:00.700) 0:06:08.002 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:33:17 -0400 (0:00:00.572) 0:06:08.481 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.]
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:33:17 -0400 (0:00:00.572) 0:06:09.053 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "8238077b-eb6b-41e5-b2ee-e708a84ef837" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "751BWi-d6YA-IKpf-c9J0-drej-CD9q-jHrN2V" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:33:19 -0400 (0:00:01.648) 0:06:10.702 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002607", "end": "2025-07-30 21:33:20.605276", "rc": 0, "start": "2025-07-30 21:33:20.602669" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:33:21 -0400 (0:00:01.618) 0:06:12.320 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002501", "end": "2025-07-30 21:33:22.205513", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:33:22.203012" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:33:22 -0400 (0:00:01.564) 0:06:13.885 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:33:23 -0400 (0:00:00.833) 0:06:14.719 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:33:23 -0400 (0:00:00.332) 0:06:15.052 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.018110", "end": "2025-07-30 21:33:24.932044", "rc": 0, "start": "2025-07-30 21:33:24.913934" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:33:25 -0400 (0:00:01.525) 0:06:16.578 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:33:25 -0400 (0:00:00.469) 0:06:17.047 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:33:26 -0400 (0:00:00.808) 0:06:17.856 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:33:27 -0400 (0:00:00.463) 0:06:18.319 ******** ok: [managed-node13] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:33:28 -0400 (0:00:01.378) 0:06:19.698 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:33:28 -0400 (0:00:00.438) 0:06:20.213 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:33:29 -0400 (0:00:00.365) 0:06:20.578 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:33:29 -0400 (0:00:00.344) 0:06:20.923 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:33:30 -0400 (0:00:00.391) 0:06:21.315 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:33:30 -0400 (0:00:00.459) 0:06:21.774 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:33:30 -0400 (0:00:00.310) 0:06:22.084 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:33:31 -0400 (0:00:00.504) 0:06:22.589 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:33:32 -0400 (0:00:01.565) 0:06:24.155 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:33:33 -0400 (0:00:00.379) 0:06:24.534 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:33:33 -0400 (0:00:00.727) 0:06:25.261 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:33:34 -0400 (0:00:00.322) 0:06:25.584 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:33:34 -0400 (0:00:00.438) 0:06:26.022 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:33:35 -0400 (0:00:00.394) 0:06:26.417 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:33:35 -0400 (0:00:00.408) 0:06:26.826 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:33:35 -0400 (0:00:00.353) 0:06:27.179 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:33:36 -0400 (0:00:00.335) 0:06:27.515 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:33:36 -0400 (0:00:00.323) 0:06:27.838 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:33:36 -0400 (0:00:00.337) 0:06:28.176 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:33:37 -0400 (0:00:00.680) 0:06:28.856 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:33:37 -0400 (0:00:00.347) 0:06:29.204 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:33:38 -0400 (0:00:00.386) 0:06:29.590 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:33:39 -0400 (0:00:00.749) 0:06:30.340 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:33:39 -0400 (0:00:00.725) 0:06:31.065 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:33:40 -0400 (0:00:00.325) 0:06:31.391 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:33:40 -0400 (0:00:00.326) 0:06:31.717 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:33:40 -0400 (0:00:00.485) 0:06:32.203 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:33:41 -0400 (0:00:00.323) 0:06:32.526 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:33:41 -0400 (0:00:00.313) 0:06:32.839 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:33:41 -0400 (0:00:00.237) 0:06:33.077 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:33:42 -0400 (0:00:00.342) 0:06:33.419 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:33:42 -0400 (0:00:00.770) 0:06:34.190 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:33:43 -0400 (0:00:00.718) 0:06:34.909 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:33:43 -0400 (0:00:00.380) 0:06:35.289 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:33:44 -0400 (0:00:00.223) 0:06:35.512 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:33:44 -0400 (0:00:00.335) 0:06:35.848 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:33:44 -0400 (0:00:00.356) 0:06:36.204 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:33:45 -0400 (0:00:00.747) 0:06:36.951 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:33:46 -0400 (0:00:00.395) 0:06:37.346 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:33:46 -0400 (0:00:00.269) 0:06:37.616 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:33:47 -0400 (0:00:00.702) 0:06:38.318 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:33:47 -0400 (0:00:00.365) 0:06:38.684 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:33:47 -0400 (0:00:00.414) 0:06:39.098 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:33:48 -0400 (0:00:00.357) 0:06:39.455 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:33:48 -0400 (0:00:00.342) 0:06:39.798 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:33:48 -0400 (0:00:00.325) 0:06:40.124 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:33:49 -0400 (0:00:00.488) 0:06:40.613 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:33:49 -0400 (0:00:00.362) 0:06:40.975 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:33:50 -0400 (0:00:00.743) 0:06:41.719 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:33:50 -0400 (0:00:00.554) 0:06:42.274 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:33:51 -0400 (0:00:00.271) 0:06:42.583 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:33:51 -0400 (0:00:00.348) 0:06:42.931 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:33:51 -0400 (0:00:00.302) 0:06:43.234 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:33:52 -0400 (0:00:00.359) 0:06:43.593 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:33:52 -0400 (0:00:00.301) 0:06:43.894 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:33:52 -0400 (0:00:00.298) 0:06:44.193 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:33:53 -0400 (0:00:00.391) 0:06:44.584 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:33:54 -0400 (0:00:00.774) 0:06:45.359 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:33:54 -0400 (0:00:00.361) 0:06:45.721 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:33:54 -0400 (0:00:00.292) 0:06:46.014 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools were created] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:33:55 -0400 (0:00:00.289) 0:06:46.304 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:33:55 -0400 (0:00:00.409) 0:06:46.713 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:33:55 -0400
(0:00:00.344) 0:06:47.058 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:33:56 -0400 (0:00:00.261) 0:06:47.320 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:33:56 -0400 (0:00:00.630) 0:06:47.950 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:33:56 -0400 (0:00:00.294) 0:06:48.245 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:33:57 -0400 (0:00:00.639) 0:06:48.884 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:33:57 -0400 (0:00:00.295) 0:06:49.179 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:33:59 -0400 (0:00:01.416) 
0:06:50.596 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:33:59 -0400 (0:00:00.335) 0:06:50.931 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:33:59 -0400 (0:00:00.332) 0:06:51.264 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:34:00 -0400 (0:00:00.271) 0:06:51.535 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:34:00 -0400 (0:00:00.189) 0:06:51.725 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:34:00 -0400 (0:00:00.326) 0:06:52.051 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:34:01 -0400 (0:00:00.262) 0:06:52.313 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:34:01 -0400 (0:00:00.347) 0:06:52.661 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:34:01 -0400 (0:00:00.320) 0:06:52.981 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:34:01 -0400 (0:00:00.311) 0:06:53.293 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:34:02 -0400 (0:00:00.285) 0:06:53.578 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:34:02 -0400 (0:00:00.318) 0:06:53.896 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:34:03 -0400 (0:00:00.530) 0:06:54.427 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:34:03 -0400 (0:00:00.387) 0:06:54.815 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:34:03 -0400 (0:00:00.315) 0:06:55.130 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:34:04 -0400 (0:00:00.261) 0:06:55.392 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:34:04 -0400 (0:00:00.336) 0:06:55.728 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
21:34:04 -0400 (0:00:00.255) 0:06:55.983 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:34:05 -0400 (0:00:00.333) 0:06:56.317 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:34:05 -0400 (0:00:00.214) 0:06:56.532 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925578.3346627, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925578.3346627, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 178219, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925578.3346627, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:34:06 -0400 (0:00:01.098) 0:06:57.630 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:34:06 -0400 (0:00:00.317) 0:06:57.948 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:34:06 -0400 (0:00:00.153) 0:06:58.102 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:34:07 -0400 (0:00:00.221) 0:06:58.324 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:34:07 -0400 (0:00:00.330) 0:06:58.654 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:34:07 -0400 (0:00:00.207) 0:06:58.862 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:34:07 -0400 (0:00:00.273) 0:06:59.135 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:34:08 -0400 (0:00:00.213) 0:06:59.349 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:34:09 -0400 (0:00:01.326) 0:07:00.676 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:34:09 -0400 (0:00:00.165) 0:07:00.841 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:34:09 -0400 (0:00:00.160) 0:07:01.002 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:34:10 -0400 (0:00:00.319) 0:07:01.321 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:34:10 -0400 (0:00:00.301) 0:07:01.622 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:34:10 -0400 (0:00:00.191) 0:07:01.814 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:34:10 -0400 (0:00:00.249) 0:07:02.063 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:34:10 -0400 (0:00:00.186) 0:07:02.249 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:34:11 -0400 (0:00:00.248) 0:07:02.498 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:34:11 -0400 (0:00:00.285) 0:07:02.784 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:34:11 -0400 (0:00:00.205) 0:07:02.989 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:34:11 -0400 (0:00:00.212) 0:07:03.202 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:34:12 -0400 (0:00:00.286) 0:07:03.489 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:34:12 -0400 (0:00:00.269) 0:07:03.758 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:34:12 -0400 (0:00:00.320) 0:07:04.079 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:34:13 -0400 (0:00:00.301) 0:07:04.380 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:34:13 -0400 (0:00:00.267) 0:07:04.647 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:34:13 -0400 (0:00:00.168) 0:07:04.816 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:34:13 -0400 (0:00:00.330) 0:07:05.146 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:34:14 -0400 (0:00:00.537) 0:07:05.684 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:34:14 -0400 (0:00:00.150) 0:07:05.834 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:34:14 -0400 (0:00:00.258) 0:07:06.093 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:34:15 -0400 (0:00:00.327) 0:07:06.420 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:34:15 -0400 (0:00:00.380) 0:07:06.801 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:34:15 -0400 (0:00:00.342) 0:07:07.144 ******** ok: 
[managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:34:17 -0400 (0:00:01.389) 0:07:08.534 ******** ok: [managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:34:18 -0400 (0:00:01.299) 0:07:09.833 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:34:18 -0400 (0:00:00.313) 0:07:10.147 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:34:19 -0400 (0:00:00.296) 0:07:10.443 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:34:20 -0400 (0:00:01.467) 0:07:11.911 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:34:21 -0400 (0:00:00.529) 0:07:12.440 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:34:21 -0400 (0:00:00.342) 0:07:12.812 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:34:21 -0400 (0:00:00.317) 0:07:13.129 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:34:22 -0400 (0:00:00.356) 0:07:13.485 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:34:22 -0400 
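For clarity, every size traded back and forth in this section is an exact binary multiple: "5 GiB" means 5 * 2**30 bytes. A minimal illustration of the conversions seen in this run (the helper name is hypothetical, not part of the test suite):

    def gib_to_bytes(gib):
        """1 GiB = 2**30 bytes; the tests pass sizes around as exact byte counts."""
        return gib * 2**30

    assert gib_to_bytes(5) == 5368709120    # actual and requested LV size above
    assert gib_to_bytes(10) == 10737418240  # size of the parent pool device
    assert gib_to_bytes(12) == 12884901888  # the over-sized request tested later
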
(0:00:00.300) 0:07:13.786 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:34:22 -0400 (0:00:00.362) 0:07:14.149 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:34:23 -0400 (0:00:00.445) 0:07:14.594 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:34:23 -0400 (0:00:00.303) 0:07:14.897 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:34:23 -0400 (0:00:00.354) 0:07:15.252 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:34:24 -0400 (0:00:00.211) 0:07:15.464 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:34:24 -0400 (0:00:00.277) 0:07:15.741 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:34:24 -0400 (0:00:00.275) 0:07:16.016 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:34:25 -0400 (0:00:00.453) 0:07:16.470 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:34:25 -0400 (0:00:00.217) 0:07:16.687 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:34:25 -0400 (0:00:00.306) 0:07:16.993 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:34:25 -0400 (0:00:00.271) 0:07:17.264 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:34:26 -0400 (0:00:00.268) 0:07:17.533 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:34:26 -0400 (0:00:00.278) 0:07:17.812 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:34:26 -0400 (0:00:00.337) 0:07:18.150 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:34:27 -0400 (0:00:00.260) 0:07:18.410 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:34:27 -0400 (0:00:00.407) 0:07:18.817 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:34:27 -0400 (0:00:00.357) 0:07:19.175 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.021392", "end": "2025-07-30 21:34:29.010010", "rc": 0, "start": "2025-07-30 21:34:28.988618" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:34:29 -0400 (0:00:01.419) 0:07:20.594 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] 
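The lvs call above uses --noheadings --nameprefixes --unquoted, so its report comes back as a single line of KEY=value tokens (empty values allowed, as with LVM2_CACHE_TOTAL_BLOCKS=). A sketch of how such a line can be turned into a dict; this illustrates the output format only, is not the test's actual parsing code, and assumes no value contains whitespace:

    def parse_lvs_report(line):
        """Split an lvs --nameprefixes --unquoted report line into a dict."""
        fields = {}
        for token in line.split():
            key, _, value = token.partition("=")
            fields[key] = value
        return fields

    stdout = ("LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- "
              "LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear")
    info = parse_lvs_report(stdout)
    assert info["LVM2_SEGTYPE"] == "linear"  # the value asserted on just below
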
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:34:29 -0400 (0:00:00.361) 0:07:20.956 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:34:30 -0400 (0:00:00.390) 0:07:21.346 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:34:30 -0400 (0:00:00.348) 0:07:21.695 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:34:30 -0400 (0:00:00.333) 0:07:22.029 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:34:30 -0400 (0:00:00.242) 0:07:22.272 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:34:31 -0400 (0:00:00.294) 0:07:22.566 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:34:31 -0400 (0:00:00.247) 0:07:22.814 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:34:31 -0400 (0:00:00.158) 0:07:22.972 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Test for correct handling of too-large volume size] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:96 Wednesday 30 July 2025 21:34:31 -0400 (0:00:00.169) 0:07:23.142 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node13 TASK [Store global variable value copy] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4 Wednesday 30 July 2025 21:34:32 -0400 (0:00:00.458) 
0:07:23.600 ******** ok: [managed-node13] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": false, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10 Wednesday 30 July 2025 21:34:32 -0400 (0:00:00.325) 0:07:23.925 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:34:33 -0400 (0:00:00.389) 0:07:24.315 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:34:33 -0400 (0:00:00.257) 0:07:24.573 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:34:33 -0400 (0:00:00.124) 0:07:24.697 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:34:33 -0400 (0:00:00.436) 0:07:25.134 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:34:34 -0400 (0:00:00.271) 0:07:25.406 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:34:34 -0400 (0:00:00.340) 0:07:25.746 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:34:34 -0400 (0:00:00.130) 0:07:25.877 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:34:34 -0400 (0:00:00.073) 0:07:25.951 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:34:35 -0400 (0:00:00.700) 0:07:26.652 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:34:35 -0400 (0:00:00.337) 0:07:26.989 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "12884901888.0" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:34:36 -0400 (0:00:00.366) 0:07:27.356 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:34:36 -0400 (0:00:00.293) 0:07:27.650 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:34:36 -0400 (0:00:00.199) 0:07:27.850 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:34:36 -0400 (0:00:00.309) 0:07:28.159 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 
21:34:37 -0400 (0:00:00.338) 0:07:28.498 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:34:37 -0400 (0:00:00.330) 0:07:28.828 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:34:37 -0400 (0:00:00.377) 0:07:29.206 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:34:38 -0400 (0:00:00.209) 0:07:29.416 ******** fatal: [managed-node13]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: volume 'test1' cannot be resized to '12 GiB' TASK [fedora.linux_system_roles.storage : Failed message] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:111 Wednesday 30 July 2025 21:34:43 -0400 (0:00:05.239) 0:07:34.656 ******** fatal: [managed-node13]: FAILED! => { "changed": false } MSG: {u'_ansible_no_log': False, u'crypts': [], u'pools': [], u'leaves': [], u'changed': False, u'actions': [], u'failed': True, u'volumes': [], u'invocation': {u'module_args': {u'packages_only': False, u'uses_kmod_kvdo': True, u'disklabel_type': None, u'safe_mode': False, u'diskvolume_mkfs_option_map': {u'ext4': u'-F', u'ext3': u'-F', u'ext2': u'-F'}, u'pools': [{u'raid_metadata_version': None, u'encryption_key_size': None, u'encryption_key': None, u'encryption_luks_version': None, u'encryption_tang_url': None, u'raid_spare_count': None, u'grow_to_fill': False, u'encryption_tang_thumbprint': None, u'name': u'foo', u'encryption_password': None, u'encryption': False, u'disks': [u'sda'], u'raid_level': None, u'raid_device_count': None, u'state': u'present', u'volumes': [{u'raid_metadata_version': None, u'mount_device_identifier': u'uuid', u'fs_type': u'ext4', u'mount_options': u'defaults', u'size': u'12884901888.0', u'mount_point': u'/opt/test1', u'compression': None, u'encryption_password': None, u'encryption': False, u'raid_level': None, u'raid_device_count': None, u'state': u'present', u'vdo_pool_size': None, u'mount_mode': None, u'thin_pool_name': None, u'type': u'lvm', u'encryption_key_size': None, u'deduplication': None, u'encryption_cipher': None, u'encryption_key': None, u'fs_label': u'', u'encryption_luks_version': None, u'raid_stripe_size': None, u'mount_passno': 0, u'mount_user': None, u'raid_spare_count': None, u'name': u'test1', u'cache_mode': None, u'raid_disks': [], u'mount_group': None, u'fs_overwrite_existing': True, u'disks': [u'sda'], u'cached': False, u'thin_pool_size': None, u'thin': False, u'mount_check': 0, u'cache_size': 0, u'raid_chunk_size': None, u'cache_devices': [], u'fs_create_options': u''}], u'shared': False, u'encryption_clevis_pin': None, u'type': u'lvm', u'encryption_cipher': None, u'raid_chunk_size': None}], u'volumes': [], u'pool_defaults': 
{u'raid_metadata_version': None, u'encryption_cipher': None, u'encryption_key': None, u'encryption_luks_version': None, u'raid_spare_count': None, u'grow_to_fill': False, u'encryption_password': None, u'encryption': False, u'disks': [], u'raid_level': None, u'raid_device_count': None, u'state': u'present', u'volumes': [], u'shared': False, u'type': u'lvm', u'encryption_key_size': None, u'raid_chunk_size': None}, u'volume_defaults': {u'raid_metadata_version': None, u'raid_level': None, u'fs_type': u'xfs', u'mount_options': u'defaults', u'size': 0, u'mount_point': u'', u'compression': None, u'encryption_password': None, u'encryption': False, u'mount_device_identifier': u'uuid', u'raid_device_count': None, u'state': u'present', u'vdo_pool_size': None, u'thin_pool_name': None, u'type': u'lvm', u'encryption_key_size': None, u'encryption_cipher': None, u'encryption_key': None, u'fs_label': u'', u'encryption_luks_version': None, u'raid_stripe_size': None, u'cache_size': 0, u'raid_spare_count': None, u'cache_mode': None, u'deduplication': None, u'cached': False, u'fs_overwrite_existing': True, u'disks': [], u'thin_pool_size': None, u'thin': None, u'mount_check': 0, u'mount_passno': 0, u'raid_chunk_size': None, u'cache_devices': [], u'fs_create_options': u''}, u'use_partitions': None}}, u'mounts': [], u'packages': [], u'msg': u"volume 'test1' cannot be resized to '12 GiB'"} TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:34:43 -0400 (0:00:00.397) 0:07:35.053 ******** TASK [Check that we failed in the role] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23 Wednesday 30 July 2025 21:34:44 -0400 (0:00:00.279) 0:07:35.333 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28 Wednesday 30 July 2025 21:34:44 -0400 (0:00:00.372) 0:07:35.705 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39 Wednesday 30 July 2025 21:34:44 -0400 (0:00:00.256) 0:07:35.962 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Try to create LVM with volume size equal disk's size, resize to 10737418240] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:114 Wednesday 30 July 2025 21:34:44 -0400 (0:00:00.267) 0:07:36.229 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:34:45 -0400 (0:00:00.290) 0:07:36.520 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: 
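The failure verified above is the expected one: the requested size of 12884901888.0 bytes is exactly 12 GiB, while the pool sits on a single 10 GiB disk (sda, 10737418240 bytes per the pool-size probe earlier), so blivet correctly refuses to grow the volume. The bound it trips, in miniature (values copied from the log):

    requested = 12884901888.0     # 12 GiB, from the module_args dump above
    pool_size = 10737418240       # 10 GiB, the parent device size measured earlier
    assert requested > pool_size  # hence "volume 'test1' cannot be resized to '12 GiB'"
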
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:34:45 -0400 (0:00:00.269) 0:07:36.789 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:34:45 -0400 (0:00:00.298) 0:07:37.088 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:34:46 -0400 (0:00:00.490) 0:07:37.578 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:34:46 -0400 (0:00:00.224) 0:07:37.803 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:34:46 -0400 (0:00:00.249) 0:07:38.053 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:34:47 -0400 (0:00:00.255) 0:07:38.308 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:34:47 -0400 (0:00:00.264) 0:07:38.572 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for 
managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:34:47 -0400 (0:00:00.609) 0:07:39.181 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:34:48 -0400 (0:00:00.251) 0:07:39.433 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "10737418240" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:34:48 -0400 (0:00:00.456) 0:07:39.889 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:34:48 -0400 (0:00:00.249) 0:07:40.139 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:34:49 -0400 (0:00:00.333) 0:07:40.473 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:34:49 -0400 (0:00:00.271) 0:07:40.744 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:34:49 -0400 (0:00:00.238) 0:07:40.983 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:34:49 -0400 (0:00:00.165) 0:07:41.148 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:34:50 -0400 (0:00:00.393) 0:07:41.542 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 
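The "Show storage_pools" output above is the role's input for this step, reproduced here as plain data for readability (field values copied verbatim from the log; note that size is a byte count passed as a string):

    storage_pools = [
        {
            "name": "foo",
            "disks": ["sda"],
            "volumes": [
                {
                    "name": "test1",
                    "fs_type": "ext4",
                    "mount_point": "/opt/test1",
                    "size": "10737418240",  # 10 GiB in bytes
                }
            ],
        }
    ]
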
2025 21:34:50 -0400 (0:00:00.297) 0:07:41.839 ******** changed: [managed-node13] => { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10737418240", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:34:56 -0400 (0:00:05.608) 0:07:47.448 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:34:56 -0400 (0:00:00.325) 0:07:47.774 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925315.527523, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6b95df90798c1adba66c669f2bd76d2d16d86df5", "ctime": 1753925315.524523, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", 
"mtime": 1753925315.524523, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:34:57 -0400 (0:00:01.315) 0:07:49.090 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:34:59 -0400 (0:00:01.370) 0:07:50.460 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:34:59 -0400 (0:00:00.217) 0:07:50.678 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10737418240", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set 
TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:34:59 -0400 (0:00:00.374) 0:07:51.052 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10737418240", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:35:00 -0400 (0:00:00.264) 0:07:51.316 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:35:00 -0400 (0:00:00.276) 0:07:51.593 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:35:00 -0400 (0:00:00.242) 0:07:51.835 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:35:01 -0400 (0:00:01.374) 0:07:53.209 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" }
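The changed item above is the role driving Ansible's mount module: state: mounted both writes the /etc/fstab entry and ensures the filesystem is mounted, which is consistent with the fstab line verified later in this log. A hedged standalone equivalent (the role assembles these arguments from mount_info internally, so this is illustrative, not the role's literal task):

    - name: Mount the resized volume (illustrative equivalent)
      mount:
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: ext4
        opts: defaults
        state: mounted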
"fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:35:03 -0400 (0:00:01.373) 0:07:54.583 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:35:03 -0400 (0:00:00.283) 0:07:54.866 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:35:04 -0400 (0:00:01.226) 0:07:56.093 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:35:05 -0400 (0:00:01.061) 0:07:57.155 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:35:06 -0400 (0:00:00.334) 0:07:57.489 ******** ok: [managed-node13] TASK [Verify role results - 4] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:129 Wednesday 30 July 2025 21:35:07 -0400 (0:00:01.605) 0:07:59.094 ******** included: 
TASK [Verify role results - 4] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:129 Wednesday 30 July 2025 21:35:07 -0400 (0:00:01.605) 0:07:59.094 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:35:08 -0400 (0:00:00.346) 0:07:59.441 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10737418240", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:35:08 -0400 (0:00:00.421) 0:07:59.862 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.]
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:35:08 -0400 (0:00:00.145) 0:08:00.008 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "10G", "type": "lvm", "uuid": "8238077b-eb6b-41e5-b2ee-e708a84ef837" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "751BWi-d6YA-IKpf-c9J0-drej-CD9q-jHrN2V" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:35:10 -0400 (0:00:01.410) 0:08:01.418 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003052", "end": "2025-07-30 21:35:11.063337", "rc": 0, "start": "2025-07-30 21:35:11.060285" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:35:11 -0400 (0:00:01.242) 0:08:02.661 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002419", "end": "2025-07-30 21:35:12.168393", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:35:12.165974" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:35:12 -0400 (0:00:01.037) 0:08:03.698 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:35:12 -0400 (0:00:00.546) 0:08:04.245 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:35:13 -0400 (0:00:00.241) 0:08:04.486 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.018550", "end": "2025-07-30 21:35:14.106075", "rc": 0, "start": "2025-07-30 21:35:14.087525" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:35:14 -0400 (0:00:01.249) 0:08:05.735 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:35:14 -0400 (0:00:00.339) 0:08:06.075 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:35:15 -0400 (0:00:00.527) 0:08:06.603 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false }
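The member checks that follow compare _storage_test_pool_pvs_lvm (here just /dev/sda) against the PVs actually backing VG foo. A hedged way to run the same query by hand (the test derives the list from previously collected device info rather than calling LVM directly):

    pvs --noheadings -o pv_name,vg_name | awk '$2 == "foo" { print $1 }'

which should print /dev/sda for this pool.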
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:35:16 -0400 (0:00:01.223) 0:08:08.065 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:35:17 -0400 (0:00:00.305) 0:08:08.371 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:35:17 -0400 (0:00:00.228) 0:08:08.599 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:35:17 -0400 (0:00:00.392) 0:08:08.992 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:35:18 -0400 (0:00:00.747) 0:08:09.740 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:35:18 -0400 (0:00:00.385) 0:08:10.126 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:35:19 -0400 (0:00:00.240) 0:08:10.366 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:35:19 -0400 (0:00:00.362) 0:08:10.729 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:35:20 -0400 (0:00:01.304) 0:08:12.033 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:35:20 -0400 (0:00:00.259) 0:08:12.292 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:35:21 -0400 (0:00:00.436) 0:08:12.729 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:35:21 -0400 (0:00:00.262) 0:08:12.992 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:35:21 -0400 (0:00:00.297) 0:08:13.290 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:35:22 -0400 (0:00:00.253) 0:08:13.543 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:35:22 -0400 (0:00:00.309) 0:08:13.852 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:35:22 -0400 (0:00:00.222) 0:08:14.075 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:35:23 -0400 (0:00:00.295) 0:08:14.370 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:35:23 -0400 (0:00:00.269) 0:08:14.640 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:35:23 -0400 (0:00:00.378) 0:08:15.018 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:35:23 -0400 (0:00:00.186) 0:08:15.205 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:35:24 -0400 (0:00:00.280) 0:08:15.485 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:35:24 -0400 (0:00:00.323) 0:08:15.808 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:35:25 -0400 (0:00:00.662) 0:08:16.471 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:35:25 -0400 (0:00:00.581) 0:08:17.052 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:35:26 -0400 (0:00:00.266) 0:08:17.318 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:35:26 -0400 (0:00:00.261) 0:08:17.580 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:35:26 -0400 (0:00:00.277) 0:08:17.857 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:35:26 -0400 (0:00:00.239) 0:08:18.096 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:35:27 -0400 (0:00:00.266) 0:08:18.362 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:35:27 -0400 (0:00:00.195) 0:08:18.558 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:35:27 -0400 (0:00:00.144) 0:08:18.702 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:35:27 -0400 (0:00:00.579) 0:08:19.281 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:35:28 -0400 (0:00:00.525) 0:08:19.807 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:35:28 -0400 (0:00:00.182) 0:08:19.990 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:35:28 -0400 (0:00:00.265) 0:08:20.256 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:35:29 -0400 (0:00:00.264) 0:08:20.520 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:35:29 -0400 (0:00:00.225) 0:08:20.745 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:35:30 -0400 (0:00:00.642) 0:08:21.388 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:35:30 -0400 (0:00:00.270) 0:08:21.658 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:35:30 -0400 (0:00:00.265) 0:08:21.924 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:35:31 -0400 (0:00:00.537) 0:08:22.462 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:35:31 -0400 (0:00:00.254) 0:08:22.717 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:35:31 -0400 (0:00:00.314) 0:08:23.032 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:35:31 -0400 (0:00:00.247) 0:08:23.279 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:35:32 -0400 (0:00:00.276) 0:08:23.556 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:35:32 -0400 (0:00:00.231) 0:08:23.788 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:35:32 -0400 (0:00:00.146) 0:08:23.935 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:35:32 -0400 (0:00:00.293) 0:08:24.228 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:35:33 -0400 (0:00:00.654) 0:08:24.882 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:35:34 -0400 (0:00:00.488) 0:08:25.371 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:35:34 -0400 (0:00:00.327) 0:08:25.698 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:35:35 -0400 (0:00:00.722) 0:08:26.421 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:35:35 -0400 (0:00:00.101) 0:08:26.523 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:35:35 -0400 (0:00:00.089) 0:08:26.612 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:35:35 -0400 (0:00:00.372) 0:08:26.984 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:35:36 -0400 (0:00:00.324) 0:08:27.308 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:35:36 -0400 (0:00:00.297) 0:08:27.606 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:35:37 -0400 (0:00:00.760) 0:08:28.367 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:35:37 -0400 (0:00:00.242) 0:08:28.609 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:35:37 -0400 (0:00:00.244) 0:08:28.854 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools were created] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:35:37 -0400 (0:00:00.244) 0:08:29.098 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:35:38 -0400 (0:00:00.364) 0:08:29.463 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:35:38 -0400
(0:00:00.246) 0:08:29.709 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:35:38 -0400 (0:00:00.306) 0:08:30.016 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:35:39 -0400 (0:00:00.292) 0:08:30.309 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:35:39 -0400 (0:00:00.237) 0:08:30.546 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:35:39 -0400 (0:00:00.477) 0:08:31.024 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:35:40 -0400 (0:00:00.309) 0:08:31.334 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:35:41 -0400 (0:00:01.307) 
0:08:32.642 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:35:41 -0400 (0:00:00.285) 0:08:32.927 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:35:42 -0400 (0:00:00.420) 0:08:33.347 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:35:42 -0400 (0:00:00.331) 0:08:33.679 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:35:42 -0400 (0:00:00.419) 0:08:34.099 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:35:43 -0400 (0:00:00.408) 0:08:34.508 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:35:43 -0400 (0:00:00.306) 0:08:34.814 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:35:43 -0400 (0:00:00.442) 0:08:35.256 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:35:44 -0400 (0:00:00.332) 0:08:35.588 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:35:44 -0400 (0:00:00.253) 0:08:35.842 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:35:44 -0400 (0:00:00.229) 0:08:36.071 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:35:45 -0400 (0:00:00.307) 0:08:36.379 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:35:45 -0400 (0:00:00.422) 0:08:36.802 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:35:45 -0400 (0:00:00.360) 0:08:37.162 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:35:46 -0400 (0:00:00.324) 0:08:37.487 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:35:46 -0400 (0:00:00.243) 0:08:37.730 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:35:46 -0400 (0:00:00.324) 0:08:38.055 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
21:35:47 -0400 (0:00:00.257) 0:08:38.312 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:35:47 -0400 (0:00:00.399) 0:08:38.711 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:35:47 -0400 (0:00:00.334) 0:08:39.046 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925695.8001714, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925695.8001714, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 178219, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925695.8001714, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:35:49 -0400 (0:00:01.510) 0:08:40.557 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:35:49 -0400 (0:00:00.357) 0:08:40.914 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:35:49 -0400 (0:00:00.339) 0:08:41.253 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:35:50 -0400 (0:00:00.302) 0:08:41.556 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:35:50 -0400 (0:00:00.361) 0:08:41.917 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:35:51 -0400 (0:00:00.395) 0:08:42.313 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:35:51 -0400 (0:00:00.224) 0:08:42.538 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:35:51 -0400 (0:00:00.286) 0:08:42.824 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:35:53 -0400 (0:00:01.637) 0:08:44.462 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:35:53 -0400 (0:00:00.719) 0:08:45.181 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:35:54 -0400 (0:00:00.252) 0:08:45.434 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:35:54 -0400 (0:00:00.383) 0:08:45.817 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:35:54 -0400 (0:00:00.303) 0:08:46.121 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:35:55 -0400 (0:00:00.386) 0:08:46.507 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:35:55 -0400 (0:00:00.294) 0:08:46.802 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:35:55 -0400 (0:00:00.208) 0:08:47.011 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:35:55 -0400 (0:00:00.260) 0:08:47.272 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:35:56 -0400 (0:00:00.261) 0:08:47.534 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:35:56 -0400 (0:00:00.347) 0:08:47.881 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:35:56 -0400 (0:00:00.287) 0:08:48.168 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:35:57 -0400 (0:00:00.246) 0:08:48.415 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:35:57 -0400 (0:00:00.209) 0:08:48.624 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:35:57 -0400 (0:00:00.218) 0:08:48.843 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:35:57 -0400 (0:00:00.208) 0:08:49.051 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:35:57 -0400 (0:00:00.180) 0:08:49.231 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:35:58 -0400 (0:00:00.074) 0:08:49.306 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:35:58 -0400 (0:00:00.251) 0:08:49.557 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:35:58 -0400 (0:00:00.240) 0:08:49.798 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:35:58 -0400 (0:00:00.299) 0:08:50.098 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:35:59 -0400 (0:00:00.210) 0:08:50.308 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:35:59 -0400 (0:00:00.175) 0:08:50.484 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:35:59 -0400 (0:00:00.226) 0:08:50.710 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:35:59 -0400 (0:00:00.295) 0:08:51.006 ******** ok: 
[managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:36:00 -0400 (0:00:01.078) 0:08:52.085 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:36:01 -0400 (0:00:01.100) 0:08:53.185 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "10737418240" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:36:02 -0400 (0:00:00.272) 0:08:53.458 ******** ok: [managed-node13] => { "storage_test_expected_size": "10737418240" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:36:02 -0400 (0:00:00.102) 0:08:53.560 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:36:03 -0400 (0:00:00.997) 0:08:54.557 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:36:03 -0400 (0:00:00.226) 0:08:54.784 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:36:03 -0400 (0:00:00.116) 0:08:54.900 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:36:03 -0400 (0:00:00.113) 0:08:55.014 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:36:03 -0400 (0:00:00.170) 0:08:55.185 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 
21:36:04 -0400 (0:00:00.246) 0:08:55.432 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:36:04 -0400 (0:00:00.370) 0:08:55.802 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:36:04 -0400 (0:00:00.204) 0:08:56.007 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:36:04 -0400 (0:00:00.218) 0:08:56.226 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:36:05 -0400 (0:00:00.155) 0:08:56.381 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:36:05 -0400 (0:00:00.185) 0:08:56.566 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:36:05 -0400 (0:00:00.298) 0:08:56.865 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:36:05 -0400 (0:00:00.274) 0:08:57.140 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:36:06 -0400 (0:00:00.203) 0:08:57.343 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:36:06 -0400 (0:00:00.192) 0:08:57.536 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 
July 2025 21:36:06 -0400 (0:00:00.234) 0:08:57.771 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:36:06 -0400 (0:00:00.283) 0:08:58.055 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:36:06 -0400 (0:00:00.214) 0:08:58.269 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:36:07 -0400 (0:00:00.295) 0:08:58.565 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:36:07 -0400 (0:00:00.270) 0:08:58.835 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:36:07 -0400 (0:00:00.262) 0:08:59.098 ******** ok: [managed-node13] => { "storage_test_expected_size": "10737418240" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:36:08 -0400 (0:00:00.286) 0:08:59.309 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:36:08 -0400 (0:00:00.286) 0:08:59.595 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.023987", "end": "2025-07-30 21:36:09.080560", "rc": 0, "start": "2025-07-30 21:36:09.056573" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:36:09 -0400 (0:00:00.956) 0:09:00.552 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false }
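The segment-type assertion that follows is driven entirely by the lvs query shown above: --nameprefixes makes lvs emit shell-style KEY=value pairs (LVM2_LV_NAME=test1 ... LVM2_SEGTYPE=linear), which the test splits apart to learn that foo/test1 is a plain linear LV rather than a cached or RAID one. A minimal standalone sketch of the same query, assuming the foo/test1 names seen in this run; lvs_out is an illustrative register name, not the role's own, and the role itself stores the segment type as a list rather than a string:

# Sketch: query the LV layout the same way the test does and extract the
# segment type from the KEY=value output produced by --nameprefixes.
- name: Get information about the LV
  command:
    argv:
      - lvs
      - --noheadings
      - --nameprefixes
      - --units=b
      - --nosuffix
      - --unquoted
      - -o
      - name,attr,cache_total_blocks,chunk_size,segtype
      - foo/test1            # vg/lv names taken from the log above
  register: lvs_out          # hypothetical register name
  changed_when: false

- name: Set LV segment type
  set_fact:
    # stdout looks like: LVM2_LV_NAME=test1 ... LVM2_SEGTYPE=linear
    storage_test_lv_segtype: "{{ (lvs_out.stdout | regex_search('LVM2_SEGTYPE=\\S+')).split('=') | last }}"

TASK [Check segment type]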
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:36:09 -0400 (0:00:00.108) 0:09:00.661 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:36:09 -0400 (0:00:00.216) 0:09:00.878 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:36:09 -0400 (0:00:00.201) 0:09:01.079 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:36:09 -0400 (0:00:00.169) 0:09:01.249 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:36:10 -0400 (0:00:00.174) 0:09:01.424 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:36:10 -0400 (0:00:00.178) 0:09:01.602 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:36:10 -0400 (0:00:00.230) 0:09:01.833 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:36:10 -0400 (0:00:00.187) 0:09:02.021 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Try to resize LVM volume size to disk size + 1.5 % (a difference of less than 2 % from the maximum size should be tolerated)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:134 Wednesday 30 July 2025 21:36:10 -0400 (0:00:00.280) 0:09:02.212 ********
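The requested size in this step is the 10 GiB disk size plus 1.5 %: 10737418240 bytes x 1.015 = 10898479513.6, which is exactly the size string that appears in storage_pools below. Because the overshoot stays inside the roughly 2 % tolerance named in the task title, the expectation is that the role completes with an empty actions list and the volume stays at 10 GiB. A minimal sketch of what such an invocation could look like, using only values visible in this run; the task name is shortened for illustration:

# Sketch: ask for 1.5 % more than the disk can hold.
# 10 GiB = 10737418240 bytes; 10737418240 * 1.015 = 10898479513.6.
# An overshoot below ~2 % should be tolerated rather than failed.
- name: Try to resize LVM volume size to disk size + 1.5 %
  include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_pools:
      - name: foo
        disks: ["sda"]
        volumes:
          - name: test1
            size: "10898479513.6"
            fs_type: ext4
            mount_point: /opt/test1

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:36:11 -0400 (0:00:00.280) 0:09:02.493 ******** included: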
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:36:11 -0400 (0:00:00.246) 0:09:02.740 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:36:11 -0400 (0:00:00.199) 0:09:02.939 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:36:12 -0400 (0:00:00.667) 0:09:03.606 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:36:12 -0400 (0:00:00.250) 0:09:03.857 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:36:12 -0400 (0:00:00.148) 0:09:04.005 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:36:12 -0400 (0:00:00.154) 0:09:04.160 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:36:13 -0400 (0:00:00.276) 0:09:04.437 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:36:13 -0400 (0:00:00.306) 0:09:04.744 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:36:13 -0400 (0:00:00.206) 0:09:04.950 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "10898479513.6" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:36:13 -0400 (0:00:00.275) 0:09:05.226 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:36:14 -0400 (0:00:00.270) 0:09:05.497 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:36:14 -0400 (0:00:00.225) 0:09:05.722 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:36:14 -0400 (0:00:00.110) 0:09:05.833 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:36:14 -0400 (0:00:00.185) 0:09:06.018 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:36:14 -0400 (0:00:00.203) 0:09:06.221 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 
2025 21:36:15 -0400 (0:00:00.319) 0:09:06.541 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:36:15 -0400 (0:00:00.244) 0:09:06.786 ******** ok: [managed-node13] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10898479513.6", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:36:20 -0400 (0:00:04.787) 0:09:11.574 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:36:20 -0400 (0:00:00.194) 0:09:11.768 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925315.527523, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6b95df90798c1adba66c669f2bd76d2d16d86df5", "ctime": 1753925315.524523, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": 
false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925315.524523, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:36:21 -0400 (0:00:01.060) 0:09:12.829 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:36:21 -0400 (0:00:00.149) 0:09:12.979 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:36:21 -0400 (0:00:00.134) 0:09:13.113 ******** ok: [managed-node13] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10898479513.6", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : 
Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:36:22 -0400 (0:00:00.307) 0:09:13.420 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10898479513.6", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:36:22 -0400 (0:00:00.318) 0:09:13.739 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:36:22 -0400 (0:00:00.263) 0:09:14.002 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:36:22 -0400 (0:00:00.217) 0:09:14.220 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:36:24 -0400 (0:00:01.207) 0:09:15.428 ******** ok: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "dump": "0", "fstab": "/etc/fstab", 
"fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:36:25 -0400 (0:00:01.134) 0:09:16.562 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext4', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:36:25 -0400 (0:00:00.191) 0:09:16.753 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:36:26 -0400 (0:00:01.169) 0:09:17.923 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:36:27 -0400 (0:00:01.147) 0:09:19.070 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:36:27 -0400 (0:00:00.171) 0:09:19.242 ******** ok: [managed-node13] TASK [Verify role results - 5] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:149 Wednesday 30 July 2025 21:36:30 -0400 (0:00:02.581) 0:09:21.823 ******** included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:36:30 -0400 (0:00:00.436) 0:09:22.260 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "10898479513.6", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:36:31 -0400 (0:00:00.483) 0:09:22.744 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:36:31 -0400 (0:00:00.225) 0:09:22.969 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "10G", "type": "lvm", "uuid": "8238077b-eb6b-41e5-b2ee-e708a84ef837" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "751BWi-d6YA-IKpf-c9J0-drej-CD9q-jHrN2V" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:36:32 -0400 (0:00:01.127) 0:09:24.096 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002596", "end": "2025-07-30 21:36:33.567398", "rc": 0, "start": "2025-07-30 21:36:33.564802" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:36:33 -0400 (0:00:01.060) 0:09:25.156 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002656", "end": "2025-07-30 21:36:34.604695", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:36:34.602039" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:36:34 -0400 (0:00:01.017) 0:09:26.174 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:36:35 -0400 (0:00:00.536) 0:09:26.710 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:36:35 -0400 (0:00:00.227) 0:09:26.938 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.019199", "end": "2025-07-30 21:36:36.575052", "rc": 0, "start": "2025-07-30 21:36:36.555853" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:36:36 -0400 (0:00:01.226) 0:09:28.164 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:36:37 -0400 (0:00:00.352) 0:09:28.517 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:36:37 -0400 (0:00:00.510) 0:09:29.028 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false }
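The shared-VG verification above boils down to a single vgs call: --binary makes the shared attribute print as 1/0 instead of a textual value, and the lone 0 on stdout confirms that pool foo was not created as a shared (lvmlockd-managed) volume group, matching "shared": false in the pool spec printed earlier. A minimal standalone sketch of the same check; vg_shared is an illustrative register name, not the role's own:

# Sketch: confirm VG "foo" is not a shared volume group.
# --binary renders the attribute as 1/0, so the assertion is a string compare.
- name: Get VG shared value status
  command:
    argv: [vgs, --noheadings, --binary, -o, shared, foo]
  register: vg_shared        # hypothetical register name
  changed_when: false

- name: Verify that VG shared value checks out
  assert:
    that:
      - vg_shared.stdout | trim == '0'

TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:36:38 -0400 (0:00:00.280) 0:09:29.308 ******** ok: [managed-node13] => (item=/dev/sda) => {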
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:36:39 -0400 (0:00:01.192) 0:09:30.500 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:36:39 -0400 (0:00:00.276) 0:09:30.777 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:36:39 -0400 (0:00:00.187) 0:09:30.965 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:36:39 -0400 (0:00:00.309) 0:09:31.274 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:36:40 -0400 (0:00:00.234) 0:09:31.509 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:36:40 -0400 (0:00:00.354) 0:09:31.863 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:36:40 -0400 (0:00:00.328) 0:09:32.192 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:36:41 -0400 (0:00:00.357) 0:09:32.549 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:36:42 -0400 (0:00:01.112) 0:09:33.662 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:36:42 -0400 (0:00:00.253) 0:09:33.916 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:36:42 -0400 (0:00:00.339) 0:09:34.255 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:36:43 -0400 (0:00:00.220) 0:09:34.476 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:36:43 -0400 (0:00:00.223) 0:09:34.699 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:36:43 -0400 (0:00:00.219) 0:09:34.919 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:36:43 -0400 (0:00:00.243) 0:09:35.163 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:36:44 -0400 (0:00:00.206) 0:09:35.369 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:36:44 -0400 (0:00:00.243) 0:09:35.613 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:36:44 -0400 (0:00:00.202) 0:09:35.815 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:36:44 -0400 (0:00:00.205) 0:09:36.021 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:36:44 -0400 (0:00:00.191) 0:09:36.212 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:36:45 -0400 (0:00:00.195) 0:09:36.408 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:36:45 -0400 (0:00:00.230) 0:09:36.638 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:36:45 -0400 (0:00:00.465) 0:09:37.104 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:36:46 -0400 (0:00:00.396) 0:09:37.501 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:36:46 -0400 (0:00:00.197) 0:09:37.699 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:36:46 -0400 (0:00:00.193) 0:09:37.892 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:36:46 -0400 (0:00:00.206) 0:09:38.099 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:36:47 -0400 (0:00:00.263) 0:09:38.362 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:36:47 -0400 (0:00:00.166) 0:09:38.528 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:36:47 -0400 (0:00:00.135) 0:09:38.664 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:36:47 -0400 (0:00:00.182) 0:09:38.847 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:36:47 -0400 (0:00:00.400) 0:09:39.247 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:36:48 -0400 (0:00:00.400) 0:09:39.648 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:36:48 -0400 (0:00:00.104) 0:09:39.752 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:36:48 -0400 (0:00:00.192) 0:09:39.944 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:36:48 -0400 (0:00:00.080) 0:09:40.025 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:36:48 -0400 (0:00:00.126) 0:09:40.152 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:36:49 -0400 (0:00:00.401) 0:09:40.554 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:36:49 -0400 (0:00:00.130) 0:09:40.684 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:36:49 -0400 (0:00:00.194) 0:09:40.879 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:36:49 -0400 (0:00:00.239) 0:09:41.118 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:36:49 -0400 (0:00:00.108) 0:09:41.227 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:36:50 -0400 (0:00:00.143) 0:09:41.371 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:36:50 -0400 (0:00:00.211) 0:09:41.583 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:36:50 -0400 (0:00:00.236) 0:09:41.819 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:36:50 -0400 (0:00:00.208) 0:09:42.028 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:36:50 -0400 (0:00:00.211) 0:09:42.239 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:36:51 -0400 (0:00:00.150) 0:09:42.389 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:36:51 -0400 (0:00:00.296) 0:09:42.685 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:36:51 -0400 (0:00:00.356) 0:09:43.042 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:36:51 -0400 (0:00:00.157) 0:09:43.200 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:36:52 -0400 (0:00:00.277) 0:09:43.477 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:36:52 -0400 (0:00:00.194) 0:09:43.671 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:36:52 -0400 (0:00:00.190) 0:09:43.862 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:36:52 -0400 (0:00:00.136) 0:09:43.998 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:36:52 -0400 (0:00:00.279) 0:09:44.278 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:36:53 -0400 (0:00:00.237) 0:09:44.515 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:36:53 -0400 (0:00:00.722) 0:09:45.238 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:36:54 -0400 (0:00:00.229) 0:09:45.468 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:36:54 -0400 (0:00:00.065) 0:09:45.533 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:36:54 -0400 (0:00:00.191) 0:09:45.725 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:36:54 -0400 (0:00:00.118) 0:09:45.843 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:36:54 -0400 
(0:00:00.194) 0:09:46.038 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:36:54 -0400 (0:00:00.237) 0:09:46.275 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:36:55 -0400 (0:00:00.253) 0:09:46.529 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:36:55 -0400 (0:00:00.166) 0:09:46.696 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:36:55 -0400 (0:00:00.512) 0:09:47.208 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:36:56 -0400 (0:00:00.252) 0:09:47.460 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:36:57 -0400 (0:00:01.118) 
0:09:48.579 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:36:57 -0400 (0:00:00.247) 0:09:48.827 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:36:57 -0400 (0:00:00.281) 0:09:49.108 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:36:58 -0400 (0:00:00.201) 0:09:49.309 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:36:58 -0400 (0:00:00.179) 0:09:49.489 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:36:58 -0400 (0:00:00.147) 0:09:49.637 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:36:58 -0400 (0:00:00.305) 0:09:49.942 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:36:58 -0400 (0:00:00.317) 0:09:50.260 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:36:59 -0400 (0:00:00.186) 0:09:50.446 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:36:59 -0400 (0:00:00.271) 0:09:50.718 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:36:59 -0400 (0:00:00.257) 0:09:50.976 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:36:59 -0400 (0:00:00.178) 0:09:51.154 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:37:00 -0400 (0:00:00.490) 0:09:51.647 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:37:00 -0400 (0:00:00.202) 0:09:51.850 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:37:00 -0400 (0:00:00.116) 0:09:51.966 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:37:00 -0400 (0:00:00.194) 0:09:52.161 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:37:01 -0400 (0:00:00.358) 0:09:52.519 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
21:37:01 -0400 (0:00:00.282) 0:09:52.802 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:37:01 -0400 (0:00:00.341) 0:09:53.143 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:37:02 -0400 (0:00:00.340) 0:09:53.484 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925779.9875345, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925779.9875345, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 178219, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925779.9875345, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:37:03 -0400 (0:00:01.060) 0:09:54.544 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:37:03 -0400 (0:00:00.284) 0:09:54.829 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:37:03 -0400 (0:00:00.148) 0:09:54.978 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:37:03 -0400 (0:00:00.210) 0:09:55.189 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:37:04 -0400 (0:00:00.175) 0:09:55.364 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:37:04 -0400 (0:00:00.113) 0:09:55.478 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:37:04 -0400 (0:00:00.126) 0:09:55.605 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:37:04 -0400 (0:00:00.137) 0:09:55.742 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:37:05 -0400 (0:00:01.132) 0:09:56.875 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:37:05 -0400 (0:00:00.185) 0:09:57.060 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:37:05 -0400 (0:00:00.167) 0:09:57.227 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:37:06 -0400 (0:00:00.181) 0:09:57.408 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:37:06 -0400 (0:00:00.090) 0:09:57.499 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:37:06 -0400 (0:00:00.145) 0:09:57.644 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:37:06 -0400 (0:00:00.146) 0:09:57.791 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:37:06 -0400 (0:00:00.115) 0:09:57.906 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:37:06 -0400 (0:00:00.092) 0:09:57.998 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:37:06 -0400 (0:00:00.110) 0:09:58.109 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:37:06 -0400 (0:00:00.121) 0:09:58.240 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:37:07 -0400 (0:00:00.159) 0:09:58.400 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:37:07 -0400 (0:00:00.182) 0:09:58.583 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:37:07 -0400 (0:00:00.156) 0:09:58.740 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:37:07 -0400 (0:00:00.096) 0:09:58.837 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:37:07 -0400 (0:00:00.170) 0:09:59.007 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:37:07 -0400 (0:00:00.116) 0:09:59.123 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:37:07 -0400 (0:00:00.135) 0:09:59.258 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:37:08 -0400 (0:00:00.153) 0:09:59.411 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:37:08 -0400 (0:00:00.147) 0:09:59.559 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:37:08 -0400 (0:00:00.079) 0:09:59.638 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:37:08 -0400 (0:00:00.137) 0:09:59.776 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:37:08 -0400 (0:00:00.105) 0:09:59.881 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:37:08 -0400 (0:00:00.144) 0:10:00.025 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:37:08 -0400 (0:00:00.163) 0:10:00.189 ******** ok: 
[managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:37:10 -0400 (0:00:01.164) 0:10:01.353 ******** ok: [managed-node13] => { "bytes": 10898479513, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:37:10 -0400 (0:00:00.845) 0:10:02.199 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "10898479513" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:37:11 -0400 (0:00:00.282) 0:10:02.481 ******** ok: [managed-node13] => { "storage_test_expected_size": "10898479513" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:37:11 -0400 (0:00:00.205) 0:10:02.687 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:37:12 -0400 (0:00:00.972) 0:10:03.660 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:37:12 -0400 (0:00:00.259) 0:10:03.919 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:37:12 -0400 (0:00:00.178) 0:10:04.097 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:37:12 -0400 (0:00:00.080) 0:10:04.178 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:37:13 -0400 (0:00:00.247) 0:10:04.426 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 
21:37:13 -0400 (0:00:00.262) 0:10:04.688 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:37:13 -0400 (0:00:00.243) 0:10:04.931 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:37:13 -0400 (0:00:00.200) 0:10:05.132 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:37:14 -0400 (0:00:00.167) 0:10:05.300 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:37:14 -0400 (0:00:00.223) 0:10:05.524 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:37:14 -0400 (0:00:00.176) 0:10:05.700 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:37:14 -0400 (0:00:00.243) 0:10:05.944 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:37:14 -0400 (0:00:00.193) 0:10:06.138 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:37:15 -0400 (0:00:00.191) 0:10:06.330 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:37:15 -0400 (0:00:00.248) 0:10:06.578 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 
July 2025 21:37:15 -0400 (0:00:00.196) 0:10:06.775 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:37:15 -0400 (0:00:00.220) 0:10:06.995 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:37:15 -0400 (0:00:00.245) 0:10:07.240 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:37:16 -0400 (0:00:00.293) 0:10:07.534 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:37:16 -0400 (0:00:00.262) 0:10:07.797 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:37:16 -0400 (0:00:00.255) 0:10:08.052 ******** ok: [managed-node13] => { "storage_test_expected_size": "10898479513" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:37:16 -0400 (0:00:00.227) 0:10:08.280 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:37:17 -0400 (0:00:00.252) 0:10:08.533 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.017573", "end": "2025-07-30 21:37:18.119063", "rc": 0, "start": "2025-07-30 21:37:18.101490" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:37:18 -0400 (0:00:01.094) 0:10:09.628 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:37:18 -0400 (0:00:00.201) 0:10:09.829 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:37:18 -0400 (0:00:00.202) 0:10:10.032 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:37:18 -0400 (0:00:00.159) 0:10:10.191 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:37:19 -0400 (0:00:00.149) 0:10:10.341 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:37:19 -0400 (0:00:00.229) 0:10:10.571 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:37:19 -0400 (0:00:00.186) 0:10:10.758 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:37:19 -0400 (0:00:00.306) 0:10:11.065 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:37:19 -0400 (0:00:00.142) 0:10:11.207 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Test for correct handling of invalid size specification] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:152 Wednesday 30 July 2025 21:37:20 -0400 (0:00:00.424) 0:10:11.632 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node13 TASK [Store global variable value copy] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4 Wednesday 30 July 2025 21:37:20 -0400 (0:00:00.525) 
0:10:12.158 ******** ok: [managed-node13] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": false, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10 Wednesday 30 July 2025 21:37:20 -0400 (0:00:00.090) 0:10:12.248 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:37:21 -0400 (0:00:00.127) 0:10:12.376 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:37:21 -0400 (0:00:00.129) 0:10:12.505 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:37:21 -0400 (0:00:00.105) 0:10:12.610 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:37:21 -0400 (0:00:00.241) 0:10:12.852 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:37:21 -0400 (0:00:00.082) 0:10:12.934 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:37:21 -0400 (0:00:00.122) 0:10:13.057 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:37:21 -0400 (0:00:00.077) 0:10:13.134 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:37:22 -0400 (0:00:00.239) 0:10:13.373 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:37:22 -0400 (0:00:00.454) 0:10:13.828 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:37:22 -0400 (0:00:00.225) 0:10:14.053 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "xyz GiB" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:37:22 -0400 (0:00:00.212) 0:10:14.266 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:37:23 -0400 (0:00:00.225) 0:10:14.491 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:37:23 -0400 (0:00:00.146) 0:10:14.638 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:37:23 -0400 (0:00:00.132) 0:10:14.771 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:37:23 
-0400 (0:00:00.313) 0:10:15.085 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:37:23 -0400 (0:00:00.209) 0:10:15.295 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:37:24 -0400 (0:00:00.219) 0:10:15.515 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:37:24 -0400 (0:00:00.129) 0:10:15.644 ******** fatal: [managed-node13]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: invalid size specification 'xyz GiB' in pool 'foo' TASK [fedora.linux_system_roles.storage : Failed message] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:111 Wednesday 30 July 2025 21:37:29 -0400 (0:00:04.957) 0:10:20.602 ******** fatal: [managed-node13]: FAILED! => { "changed": false } MSG: {u'_ansible_no_log': False, u'crypts': [], u'pools': [], u'leaves': [], u'changed': False, u'actions': [], u'failed': True, u'volumes': [], u'invocation': {u'module_args': {u'packages_only': False, u'uses_kmod_kvdo': True, u'disklabel_type': None, u'safe_mode': False, u'diskvolume_mkfs_option_map': {u'ext4': u'-F', u'ext3': u'-F', u'ext2': u'-F'}, u'pools': [{u'raid_metadata_version': None, u'encryption_key_size': None, u'encryption_key': None, u'encryption_luks_version': None, u'encryption_tang_url': None, u'raid_spare_count': None, u'grow_to_fill': False, u'encryption_tang_thumbprint': None, u'name': u'foo', u'encryption_password': None, u'encryption': False, u'disks': [u'sda'], u'raid_level': None, u'raid_device_count': None, u'state': u'present', u'volumes': [{u'thin': False, u'encryption_key_size': None, u'encryption_key': None, u'fs_label': None, u'encryption_luks_version': None, u'raid_stripe_size': None, u'fs_type': u'ext4', u'mount_options': None, u'type': None, u'mount_user': None, u'raid_disks': [], u'size': u'xyz GiB', u'cache_mode': None, u'mount_point': u'/opt/test1', u'compression': None, u'encryption_password': None, u'mount_group': None, u'encryption': None, u'raid_level': None, u'name': u'test1', u'cached': None, u'state': u'present', u'vdo_pool_size': None, u'mount_mode': None, u'cache_size': None, u'thin_pool_name': None, u'cache_devices': [], u'thin_pool_size': None, u'encryption_cipher': None, u'fs_create_options': None, u'deduplication': None}], u'shared': False, u'encryption_clevis_pin': None, u'type': u'lvm', u'encryption_cipher': None, u'raid_chunk_size': None}], u'volumes': [], u'pool_defaults': {u'raid_metadata_version': None, u'encryption_cipher': None, u'encryption_key': None, u'encryption_luks_version': None, u'raid_spare_count': None, u'grow_to_fill': False, u'encryption_password': None, u'encryption': False, u'disks': [], u'raid_level': None, 
u'raid_device_count': None, u'state': u'present', u'volumes': [], u'shared': False, u'type': u'lvm', u'encryption_key_size': None, u'raid_chunk_size': None}, u'volume_defaults': {u'raid_metadata_version': None, u'raid_level': None, u'fs_type': u'xfs', u'mount_options': u'defaults', u'size': 0, u'mount_point': u'', u'compression': None, u'encryption_password': None, u'encryption': False, u'mount_device_identifier': u'uuid', u'raid_device_count': None, u'state': u'present', u'vdo_pool_size': None, u'thin_pool_name': None, u'type': u'lvm', u'encryption_key_size': None, u'encryption_cipher': None, u'encryption_key': None, u'fs_label': u'', u'encryption_luks_version': None, u'raid_stripe_size': None, u'cache_size': 0, u'raid_spare_count': None, u'cache_mode': None, u'deduplication': None, u'cached': False, u'fs_overwrite_existing': True, u'disks': [], u'thin_pool_size': None, u'thin': None, u'mount_check': 0, u'mount_passno': 0, u'raid_chunk_size': None, u'cache_devices': [], u'fs_create_options': u''}, u'use_partitions': None}}, u'mounts': [], u'packages': [], u'msg': u"invalid size specification 'xyz GiB' in pool 'foo'"} TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:37:29 -0400 (0:00:00.167) 0:10:20.770 ******** TASK [Check that we failed in the role] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23 Wednesday 30 July 2025 21:37:29 -0400 (0:00:00.104) 0:10:20.875 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28 Wednesday 30 July 2025 21:37:29 -0400 (0:00:00.091) 0:10:20.966 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39 Wednesday 30 July 2025 21:37:29 -0400 (0:00:00.248) 0:10:21.215 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Test for correct handling of invalid size specification - 2] ************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:168 Wednesday 30 July 2025 21:37:30 -0400 (0:00:00.191) 0:10:21.407 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node13 TASK [Store global variable value copy] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4 Wednesday 30 July 2025 21:37:30 -0400 (0:00:00.417) 0:10:21.824 ******** ok: [managed-node13] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": false, "storage_volumes_global": [] }, "changed": false }
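NOTE: the "All assertions passed" results above cover two distinct checks: that the role failed at all, and that it failed without staging any changes against the disks. A hedged sketch of the second check, using the blivet_output fact seen elsewhere in this log (the exact expressions are assumed; the real ones live at verify-role-failed.yml:28):

- name: Verify the blivet output and error message are correct
  assert:
    that:
      - blivet_output.failed | d(false)
      - blivet_output.msg is search('invalid size specification')
      - not blivet_output.changed
      - blivet_output.actions | length == 0   # nothing was staged against the disks
    msg: Unexpected blivet output

TASK [Verify role raises correct error - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10 Wednesday 30 July 2025 21:37:30 -0400 (0:00:00.176)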
0:10:22.000 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:37:31 -0400 (0:00:00.300) 0:10:22.301 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:37:31 -0400 (0:00:00.398) 0:10:22.699 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:37:31 -0400 (0:00:00.189) 0:10:22.889 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:37:31 -0400 (0:00:00.313) 0:10:23.203 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:37:32 -0400 (0:00:00.187) 0:10:23.390 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:37:32 -0400 (0:00:00.162) 0:10:23.553 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:37:32 -0400 
(0:00:00.127) 0:10:23.681 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:37:32 -0400 (0:00:00.123) 0:10:23.804 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:37:33 -0400 (0:00:00.505) 0:10:24.310 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:37:33 -0400 (0:00:00.420) 0:10:24.730 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "ext4", "mount_point": "/opt/test1", "name": "test1", "size": "none" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:37:33 -0400 (0:00:00.201) 0:10:24.932 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:37:33 -0400 (0:00:00.281) 0:10:25.213 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:37:34 -0400 (0:00:00.183) 0:10:25.397 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:37:34 -0400 (0:00:00.245) 0:10:25.642 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:37:34 -0400 (0:00:00.103) 0:10:25.746 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:37:34 -0400 (0:00:00.162) 0:10:25.908 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false }
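NOTE: storage_cryptsetup_services was just set to an empty list, so the "Mask the systemd cryptsetup services" task that follows has no loop items and the log shows only its header. A sketch of what that task presumably loops over (assumed shape):

- name: Mask the systemd cryptsetup services
  systemd:
    name: "{{ item }}"
    masked: true
  loop: "{{ storage_cryptsetup_services }}"   # empty here, so nothing gets masked

TASK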
[fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:37:34 -0400 (0:00:00.278) 0:10:26.187 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:37:35 -0400 (0:00:00.228) 0:10:26.416 ******** fatal: [managed-node13]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: invalid size specification 'none' in pool 'foo' TASK [fedora.linux_system_roles.storage : Failed message] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:111 Wednesday 30 July 2025 21:37:40 -0400 (0:00:04.950) 0:10:31.366 ******** fatal: [managed-node13]: FAILED! => { "changed": false } MSG: {u'_ansible_no_log': False, u'crypts': [], u'pools': [], u'leaves': [], u'changed': False, u'actions': [], u'failed': True, u'volumes': [], u'invocation': {u'module_args': {u'packages_only': False, u'uses_kmod_kvdo': True, u'disklabel_type': None, u'safe_mode': False, u'diskvolume_mkfs_option_map': {u'ext4': u'-F', u'ext3': u'-F', u'ext2': u'-F'}, u'pools': [{u'raid_metadata_version': None, u'encryption_key_size': None, u'encryption_key': None, u'encryption_luks_version': None, u'encryption_tang_url': None, u'raid_spare_count': None, u'grow_to_fill': False, u'encryption_tang_thumbprint': None, u'name': u'foo', u'encryption_password': None, u'encryption': False, u'disks': [u'sda'], u'raid_level': None, u'raid_device_count': None, u'state': u'present', u'volumes': [{u'thin': False, u'encryption_key_size': None, u'encryption_key': None, u'fs_label': None, u'encryption_luks_version': None, u'raid_stripe_size': None, u'fs_type': u'ext4', u'mount_options': None, u'type': None, u'mount_user': None, u'raid_disks': [], u'size': u'none', u'cache_mode': None, u'mount_point': u'/opt/test1', u'compression': None, u'encryption_password': None, u'mount_group': None, u'encryption': None, u'raid_level': None, u'name': u'test1', u'cached': None, u'state': u'present', u'vdo_pool_size': None, u'mount_mode': None, u'cache_size': None, u'thin_pool_name': None, u'cache_devices': [], u'thin_pool_size': None, u'encryption_cipher': None, u'fs_create_options': None, u'deduplication': None}], u'shared': False, u'encryption_clevis_pin': None, u'type': u'lvm', u'encryption_cipher': None, u'raid_chunk_size': None}], u'volumes': [], u'pool_defaults': {u'raid_metadata_version': None, u'encryption_cipher': None, u'encryption_key': None, u'encryption_luks_version': None, u'raid_spare_count': None, u'grow_to_fill': False, u'encryption_password': None, u'encryption': False, u'disks': [], u'raid_level': None, u'raid_device_count': None, u'state': u'present', u'volumes': [], u'shared': False, u'type': u'lvm', u'encryption_key_size': None, u'raid_chunk_size': None}, u'volume_defaults': {u'raid_metadata_version': None, u'raid_level': None, u'fs_type': u'xfs', u'mount_options': u'defaults', u'size': 0, u'mount_point': u'', u'compression': None, u'encryption_password': None, u'encryption': False, u'mount_device_identifier': u'uuid', u'raid_device_count': None, u'state': u'present', u'vdo_pool_size': None, u'thin_pool_name': None, u'type': 
u'lvm', u'encryption_key_size': None, u'encryption_cipher': None, u'encryption_key': None, u'fs_label': u'', u'encryption_luks_version': None, u'raid_stripe_size': None, u'cache_size': 0, u'raid_spare_count': None, u'cache_mode': None, u'deduplication': None, u'cached': False, u'fs_overwrite_existing': True, u'disks': [], u'thin_pool_size': None, u'thin': None, u'mount_check': 0, u'mount_passno': 0, u'raid_chunk_size': None, u'cache_devices': [], u'fs_create_options': u''}, u'use_partitions': None}}, u'mounts': [], u'packages': [], u'msg': u"invalid size specification 'none' in pool 'foo'"} TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:37:40 -0400 (0:00:00.174) 0:10:31.541 ******** TASK [Check that we failed in the role] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23 Wednesday 30 July 2025 21:37:40 -0400 (0:00:00.089) 0:10:31.631 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the blivet output and error message are correct] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28 Wednesday 30 July 2025 21:37:40 -0400 (0:00:00.200) 0:10:31.831 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39 Wednesday 30 July 2025 21:37:40 -0400 (0:00:00.260) 0:10:32.091 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up] **************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:185 Wednesday 30 July 2025 21:37:40 -0400 (0:00:00.136) 0:10:32.228 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:37:41 -0400 (0:00:00.600) 0:10:32.829 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:37:41 -0400 (0:00:00.238) 0:10:33.068 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:37:41 -0400 (0:00:00.104) 0:10:33.172 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional 
result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:37:42 -0400 (0:00:00.203) 0:10:33.375 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:37:42 -0400 (0:00:00.108) 0:10:33.484 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:37:42 -0400 (0:00:00.167) 0:10:33.651 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:37:42 -0400 (0:00:00.190) 0:10:33.841 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:37:42 -0400 (0:00:00.167) 0:10:34.009 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:37:42 -0400 (0:00:00.173) 0:10:34.182 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:37:43 -0400 (0:00:00.118) 0:10:34.301 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "state": "absent", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] }
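NOTE: the storage_pools value shown above ("state": "absent") is the clean-up invocation: the same role call that creates storage also tears it down when the pool state is absent. A sketch of play variables that would produce this input (an assumed reconstruction of the task at tests_resize.yml:185):

- name: Clean up
  include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_safe_mode: false        # permit destructive operations
    storage_pools:
      - name: foo
        disks: ["sda"]
        state: absent
        volumes:
          - name: test1
            size: "5g"
            mount_point: /opt/test1

TASK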
[fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:37:43 -0400 (0:00:00.182) 0:10:34.483 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:37:43 -0400 (0:00:00.188) 0:10:34.672 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:37:43 -0400 (0:00:00.198) 0:10:34.871 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:37:43 -0400 (0:00:00.217) 0:10:35.088 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:37:43 -0400 (0:00:00.198) 0:10:35.287 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:37:44 -0400 (0:00:00.191) 0:10:35.479 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:37:44 -0400 (0:00:00.294) 0:10:35.774 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:37:44 -0400 (0:00:00.207) 0:10:35.981 ******** changed: [managed-node13] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext4", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:37:50 -0400 (0:00:05.385) 0:10:41.366 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:37:50 -0400 (0:00:00.159) 0:10:41.526 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925315.527523, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6b95df90798c1adba66c669f2bd76d2d16d86df5", "ctime": 1753925315.524523, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925315.524523, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:37:51 -0400 (0:00:00.932) 0:10:42.458 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:37:52 -0400 
(0:00:00.984) 0:10:43.442 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:37:52 -0400 (0:00:00.175) 0:10:43.618 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "ext4" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext4", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:37:52 -0400 (0:00:00.167) 0:10:43.786 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": 
"/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:37:52 -0400 (0:00:00.132) 0:10:43.918 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:37:52 -0400 (0:00:00.127) 0:10:44.045 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'state': u'absent', u'fstype': u'ext4', u'path': u'/opt/test1'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "fstype": "ext4", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:37:53 -0400 (0:00:00.777) 0:10:44.823 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:37:54 -0400 (0:00:01.001) 0:10:45.824 ******** TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:37:54 -0400 (0:00:00.173) 0:10:45.998 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:37:54 -0400 (0:00:00.159) 0:10:46.158 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 
21:37:55 -0400 (0:00:00.968) 0:10:47.127 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:37:56 -0400 (0:00:00.692) 0:10:47.819 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:37:56 -0400 (0:00:00.109) 0:10:47.929 ******** ok: [managed-node13] TASK [Verify role results - 6] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:198 Wednesday 30 July 2025 21:37:57 -0400 (0:00:01.175) 0:10:49.104 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:37:58 -0400 (0:00:00.251) 0:10:49.356 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], 
"raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:37:58 -0400 (0:00:00.195) 0:10:49.551 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:37:58 -0400 (0:00:00.181) 0:10:49.733 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:37:59 -0400 (0:00:00.757) 0:10:50.491 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002652", "end": "2025-07-30 21:37:59.728260", "rc": 0, "start": "2025-07-30 21:37:59.725608" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs 
defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:37:59 -0400 (0:00:00.692) 0:10:51.183 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003616", "end": "2025-07-30 21:38:00.526626", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:38:00.523010" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:38:00 -0400 (0:00:00.837) 0:10:52.021 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:38:00 -0400 (0:00:00.252) 0:10:52.273 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.088) 0:10:52.362 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.086) 0:10:52.448 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.074) 0:10:52.522 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.157) 0:10:52.680 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.115) 0:10:52.795 ******** TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.065) 0:10:52.860 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.073) 0:10:52.934 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:38:01 -0400 (0:00:00.186) 0:10:53.121 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:38:02 -0400 (0:00:00.195) 0:10:53.316 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:38:02 -0400 (0:00:00.117) 0:10:53.433 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:38:02 -0400 (0:00:00.136) 0:10:53.569 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:38:02 -0400 (0:00:00.133) 0:10:53.703 ********
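NOTE: the "Check that blivet supports PV grow to fill" probe that follows returns rc 1 yet is recorded as ok, because the task suppresses failure ("failed_when_result": false); a non-zero exit simply means the installed blivet lacks the feature. The general pattern, with a placeholder probe command (the actual command is not shown in this log):

- name: Check that blivet supports PV grow to fill
  command: "{{ probe_command }}"    # placeholder; the real probe is not shown in this log
  register: grow_supported
  changed_when: false
  failed_when: false                # rc 1 just means the feature is absent

TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:38:02 -0400 (0:00:00.117) 0:10:53.821 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed.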
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:38:03 -0400 (0:00:00.587) 0:10:54.409 ******** TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:38:03 -0400 (0:00:00.159) 0:10:54.569 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:38:03 -0400 (0:00:00.175) 0:10:54.745 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:38:03 -0400 (0:00:00.143) 0:10:54.889 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:38:03 -0400 (0:00:00.133) 0:10:55.022 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:38:03 -0400 (0:00:00.073) 0:10:55.096 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:38:03 -0400 (0:00:00.100) 0:10:55.196 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:38:04 -0400 (0:00:00.105) 0:10:55.301 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:38:04 -0400 (0:00:00.104) 0:10:55.406 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:38:04 -0400 (0:00:00.127) 0:10:55.534 ******** skipping: 
[managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:38:04 -0400 (0:00:00.245) 0:10:55.780 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:38:04 -0400 (0:00:00.100) 0:10:55.880 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:38:04 -0400 (0:00:00.093) 0:10:55.974 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:38:04 -0400 (0:00:00.113) 0:10:56.087 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.213) 0:10:56.301 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.202) 0:10:56.503 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.053) 0:10:56.557 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.083) 0:10:56.640 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:38:05 -0400 
(0:00:00.054) 0:10:56.695 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.074) 0:10:56.770 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.070) 0:10:56.840 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.076) 0:10:56.916 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:38:05 -0400 (0:00:00.088) 0:10:57.005 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.303) 0:10:57.308 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.171) 0:10:57.479 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.146) 0:10:57.625 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.103) 0:10:57.729 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.125) 0:10:57.855 ******** ok: [managed-node13] => { "ansible_facts": { 
"storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.073) 0:10:57.928 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.229) 0:10:58.158 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.085) 0:10:58.244 ******** TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:38:06 -0400 (0:00:00.048) 0:10:58.292 ******** TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.050) 0:10:58.342 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.135) 0:10:58.477 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.216) 0:10:58.694 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.222) 0:10:58.916 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.096) 0:10:59.013 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] 
**************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.085) 0:10:59.099 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.098) 0:10:59.197 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO compression is off - 2] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:38:07 -0400 (0:00:00.081) 0:10:59.279 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO compression is on - 2] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.068) 0:10:59.348 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.065) 0:10:59.413 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.060) 0:10:59.473 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.162) 0:10:59.636 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.057) 0:10:59.693 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.052) 0:10:59.746 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pool was created] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.153) 0:10:59.899
******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.084) 0:10:59.984 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.126) 0:11:00.111 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.096) 0:11:00.207 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:38:08 -0400 (0:00:00.052) 0:11:00.259 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:38:09 -0400 (0:00:00.118) 0:11:00.377 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:38:09 -0400 (0:00:00.231) 0:11:00.609 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:38:09 -0400 (0:00:00.070) 0:11:00.680 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:38:09 -0400 (0:00:00.460) 0:11:01.140 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:38:10 -0400 (0:00:00.293) 0:11:01.433 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:38:10 -0400 (0:00:00.144) 0:11:01.578 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:38:10 -0400 (0:00:00.154) 0:11:01.732 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:38:10 -0400 (0:00:00.087) 0:11:01.820 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:38:10 -0400 (0:00:00.103) 0:11:01.924 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:38:10 -0400 (0:00:00.109) 0:11:02.034 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:38:10 -0400 (0:00:00.157) 0:11:02.192 ******** skipping: 
[managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.139) 0:11:02.331 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.115) 0:11:02.447 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.097) 0:11:02.545 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.095) 0:11:02.641 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.170) 0:11:02.811 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.105) 0:11:02.917 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed
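The expectation variables set above drive that assertion; for reference, a check of this shape would reproduce it (a sketch only: the slurp/regex chain is an assumption, while the variable names come straight from the log, with the expected count "0" because the volume has been removed at this point):

```yaml
# Sketch: count /etc/fstab lines that mount /opt/test1 and compare the
# count with the expectation; slurp/b64decode/regex_findall are assumed
# mechanics, not necessarily what the test file itself uses.
- name: Read /etc/fstab
  slurp:
    src: /etc/fstab
  register: storage_test_fstab_raw

- name: Verify the fstab mount point
  assert:
    that:
      - storage_test_fstab_raw.content | b64decode
        | regex_findall(' /opt/test1 ') | length | string
        == storage_test_fstab_expected_mount_point_matches
```

TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.089) 0:11:03.017 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:38:11 -0400 (0:00:00.143) 0:11:03.160 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: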
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:38:12 -0400 (0:00:00.185) 0:11:03.346 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 21:38:12 -0400 (0:00:00.149) 0:11:03.496 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:38:12 -0400 (0:00:00.091) 0:11:03.588 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:38:12 -0400 (0:00:00.130) 0:11:03.718 ******** ok: [managed-node13] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:38:13 -0400 (0:00:00.757) 0:11:04.476 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:38:13 -0400 (0:00:00.170) 0:11:04.647 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:38:13 -0400 (0:00:00.134) 0:11:04.782 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:38:13 -0400 (0:00:00.102) 0:11:04.884 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:38:13 -0400 (0:00:00.133) 0:11:05.018 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] 
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:38:13 -0400 (0:00:00.127) 0:11:05.146 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:38:13 -0400 (0:00:00.115) 0:11:05.262 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:38:14 -0400 (0:00:00.116) 0:11:05.379 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:38:14 -0400 (0:00:00.872) 0:11:06.252 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.086) 0:11:06.338 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.110) 0:11:06.449 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.086) 0:11:06.536 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.090) 0:11:06.626 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.115) 0:11:06.741 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.052) 0:11:06.794 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.082) 0:11:06.876 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.096) 0:11:06.973 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.094) 0:11:07.067 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed
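The crypttab bookkeeping follows the same count-and-assert pattern; a hedged sketch (the expected-count variable is taken from the log, where it is "0" because this volume is not encrypted, while the grep pattern and helper names are assumptions):

```yaml
# Sketch: collect crypttab lines that reference this volume and assert the
# count matches the expectation. grep exits 1 on zero matches, so failure
# is disabled and the count is read from stdout_lines (empty stdout -> 0).
- name: Collect crypttab entries for this volume
  command: grep /dev/mapper/foo-test1 /etc/crypttab
  register: storage_test_crypttab_cmd
  failed_when: false
  changed_when: false

- name: Check for /etc/crypttab entry
  assert:
    that:
      - storage_test_crypttab_cmd.stdout_lines | length | string
        == _storage_test_expected_crypttab_entries
```

TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:38:15 -0400 (0:00:00.114) 0:11:07.182 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.127) 0:11:07.310 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.074) 0:11:07.384 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.089) 0:11:07.489 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.091) 0:11:07.581 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex]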
************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.146) 0:11:07.727 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.066) 0:11:07.793 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.117) 0:11:07.911 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.090) 0:11:08.001 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.111) 0:11:08.112 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:38:16 -0400 (0:00:00.110) 0:11:08.223 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.122) 0:11:08.346 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.106) 0:11:08.452 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.065) 0:11:08.517 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 
21:38:17 -0400 (0:00:00.056) 0:11:08.574 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.053) 0:11:08.628 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.059) 0:11:08.688 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.055) 0:11:08.743 ******** ok: [managed-node13] => { "storage_test_expected_size": "10898479513" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.059) 0:11:08.803 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.053) 0:11:08.856 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.058) 0:11:08.914 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.094) 0:11:09.009 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.069) 0:11:09.079 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:38:17 -0400 (0:00:00.071) 0:11:09.151 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:38:17 -0400 
(0:00:00.114) 0:11:09.265 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.104) 0:11:09.369 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.127) 0:11:09.497 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.124) 0:11:09.622 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.116) 0:11:09.739 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.103) 0:11:09.842 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.102) 0:11:09.945 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.104) 0:11:10.050 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.080) 0:11:10.130 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:38:18 -0400 (0:00:00.133) 0:11:10.264 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:38:19 -0400 (0:00:00.080) 0:11:10.344 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.085) 0:11:10.430 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.106) 0:11:10.536 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.058) 0:11:10.595 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.070) 0:11:10.666 ******** ok: [managed-node13] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.061) 0:11:10.727 ******** ok: [managed-node13] => { "storage_test_expected_size": "10898479513" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.061) 0:11:10.788 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
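The size assertion is skipped here because the volume is absent at this verify stage (`storage_test_actual_size` is just the skipped-task result). When the volume exists, the test parses the requested "5g" into bytes and compares it with the size reported for the LV; roughly, with assumed names and an assumed tolerance:

```yaml
# Sketch: "5g" parses to 5368709120 bytes (human_to_bytes treats "g" as
# GiB); allow ~2% slack for LVM extent rounding. Variable names and the
# .bytes field are assumptions, not the test's actual wiring.
- name: Parse the requested size of the volume
  set_fact:
    storage_test_requested_size: "{{ '5g' | human_to_bytes }}"

- name: Assert expected size is actual size
  assert:
    that:
      - >-
        ((storage_test_actual_size.bytes | int) -
        (storage_test_requested_size | int)) | abs
        <= (storage_test_requested_size | int) * 0.02
```

TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.056) 0:11:10.845 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.069) 0:11:10.914 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.057) 0:11:10.971 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK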
[Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.063) 0:11:11.035 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.054) 0:11:11.090 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.054) 0:11:11.144 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.053) 0:11:11.198 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:38:19 -0400 (0:00:00.053) 0:11:11.252 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.051) 0:11:11.304 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.046) 0:11:11.350 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create an LVM logical volume with ext3 FS, size 5g] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:205 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.059) 0:11:11.409 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.109) 0:11:11.519 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.081) 0:11:11.600 ******** skipping: [managed-node13] => { "changed":
false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.063) 0:11:11.664 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.137) 0:11:11.802 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.047) 0:11:11.850 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.049) 0:11:11.899 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.051) 0:11:11.951 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.049) 0:11:12.000 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 
30 July 2025 21:38:20 -0400 (0:00:00.125) 0:11:12.126 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.048) 0:11:12.174 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext3", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.059) 0:11:12.234 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] }
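The storage_pools value printed above is the role's entire input for this step; expressed as a playbook, that spec looks like the following minimal sketch (only the host pattern and the include mechanism are placeholders; the pool and volume fields come straight from the log):

```yaml
- hosts: managed-node13
  tasks:
    - name: Create an LVM logical volume with ext3 FS, size 5g
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            volumes:
              - name: test1
                size: "5g"
                fs_type: ext3
                mount_point: /opt/test1
```

TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:38:20 -0400 (0:00:00.055) 0:11:12.289 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:38:21 -0400 (0:00:00.055) 0:11:12.344 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:38:21 -0400 (0:00:00.045) 0:11:12.390 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:38:21 -0400 (0:00:00.042) 0:11:12.433 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:38:21 -0400 (0:00:00.046) 0:11:12.479 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:38:21 -0400 (0:00:00.067) 0:11:12.547 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:38:21 -0400 (0:00:00.038) 0:11:12.586 ******** changed: [managed-node13] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device":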
"/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:38:28 -0400 (0:00:07.147) 0:11:19.733 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:38:28 -0400 (0:00:00.033) 0:11:19.766 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925874.3169415, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1753925873.2719371, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925873.2719371, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", 
"readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:38:28 -0400 (0:00:00.315) 0:11:20.082 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:38:29 -0400 (0:00:00.336) 0:11:20.418 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:38:29 -0400 (0:00:00.034) 0:11:20.453 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } 

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130
Wednesday 30 July 2025 21:38:29 -0400 (0:00:00.045) 0:11:20.499 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134
Wednesday 30 July 2025 21:38:29 -0400 (0:00:00.040) 0:11:20.539 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false }

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150
Wednesday 30 July 2025 21:38:29 -0400 (0:00:00.035) 0:11:20.575 ********

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161
Wednesday 30 July 2025 21:38:29 -0400 (0:00:00.038) 0:11:20.614 ********
ok: [managed-node13] => { "changed": false, "name": null, "status": {} }

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166
Wednesday 30 July 2025 21:38:29 -0400 (0:00:00.449) 0:11:21.064 ********
changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext3', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext3", "mount_info": { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" }
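
The single mount item above maps one-for-one onto Ansible's mount module; a standalone equivalent would be the following sketch (the role performs this step itself, so this is illustration only):

    - name: "Sketch: mount /dev/mapper/foo-test1 the way the role just did"
      mount:                 # ansible.posix.mount on collection-based installs
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: ext3
        opts: defaults
        state: mounted       # writes the /etc/fstab entry and mounts it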
"0", "fstab": "/etc/fstab", "fstype": "ext3", "mount_info": { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:38:30 -0400 (0:00:00.475) 0:11:21.540 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext3', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:38:30 -0400 (0:00:00.059) 0:11:21.599 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:38:30 -0400 (0:00:00.443) 0:11:22.042 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:38:31 -0400 (0:00:00.314) 0:11:22.356 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:38:31 -0400 (0:00:00.040) 0:11:22.397 ******** ok: [managed-node13] TASK [Verify role results - 7] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:221 Wednesday 30 July 2025 21:38:32 -0400 (0:00:01.698) 0:11:24.095 ******** included: 

TASK [Print out pool information] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Wednesday 30 July 2025 21:38:32 -0400 (0:00:00.086) 0:11:24.182 ********
ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }

TASK [Print out volume information] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Wednesday 30 July 2025 21:38:32 -0400 (0:00:00.072) 0:11:24.254 ********
skipping: [managed-node13] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Wednesday 30 July 2025 21:38:33 -0400 (0:00:00.055) 0:11:24.309 ********
ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext3", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "c458e499-36c0-4c93-be16-cb3e776cd12a" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "mYWJT1-FNeb-zbSL-ZvuM-scAE-drg0-x0zfjj" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } }

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Wednesday 30 July 2025 21:38:33 -0400 (0:00:00.386) 0:11:24.696 ********
ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002609", "end": "2025-07-30 21:38:33.678530", "rc": 0, "start": "2025-07-30 21:38:33.675921" }
STDOUT:
# system_role:storage
#
# /etc/fstab
# Created by anaconda on Thu Jun 20 10:23:46 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk'
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info
#
UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
/dev/mapper/foo-test1 /opt/test1 ext3 defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Wednesday 30 July 2025 21:38:33 -0400 (0:00:00.359) 0:11:25.055 ********
ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002501", "end": "2025-07-30 21:38:34.029975", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:38:34.027474" }

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Wednesday 30 July 2025 21:38:34 -0400 (0:00:00.348) 0:11:25.404 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Wednesday 30 July 2025 21:38:34 -0400 (0:00:00.110) 0:11:25.515 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false }

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Wednesday 30 July 2025 21:38:34 -0400 (0:00:00.055) 0:11:25.571 ********
ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.018359", "end": "2025-07-30 21:38:34.568895", "rc": 0, "start": "2025-07-30 21:38:34.550536" }
STDOUT: 0

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Wednesday 30 July 2025 21:38:34 -0400 (0:00:00.373) 0:11:25.944 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
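
The shared-VG check above boils down to one LVM query plus an assertion; expressed directly it might look like this sketch (the register variable name is an assumption):

    - name: "Sketch: query the VG shared attribute exactly as the test does"
      command: vgs --noheadings --binary -o shared foo
      register: storage_test_vgs_shared   # name assumed for illustration
      changed_when: false

    - name: "Sketch: assert the VG is not shared (vgs printed 0 above)"
      assert:
        that: storage_test_vgs_shared.stdout | trim == "0"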
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.368) 0:11:26.575 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.058) 0:11:26.634 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.060) 0:11:26.694 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.071) 0:11:26.766 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.067) 0:11:26.834 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.060) 0:11:26.894 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.058) 0:11:26.952 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:38:35 -0400 (0:00:00.075) 0:11:27.028 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.276) 0:11:27.305 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.059) 0:11:27.364 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.114) 0:11:27.478 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.053) 0:11:27.532 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.050) 0:11:27.582 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.047) 0:11:27.629 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.050) 0:11:27.680 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.119) 0:11:27.799 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.054) 0:11:27.854 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.051) 0:11:27.906 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.051) 0:11:27.957 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.051) 0:11:28.009 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.053) 0:11:28.063 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.056) 0:11:28.119 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:38:36 -0400 (0:00:00.111) 0:11:28.231 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.092) 0:11:28.323 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.045) 0:11:28.369 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.038) 0:11:28.407 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.033) 0:11:28.440 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.033) 0:11:28.474 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.036) 0:11:28.510 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.041) 0:11:28.552 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.053) 0:11:28.605 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.116) 0:11:28.722 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.119) 0:11:28.841 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.051) 0:11:28.892 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.049) 0:11:28.942 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.050) 0:11:28.993 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.049) 0:11:29.043 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.104) 0:11:29.147 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.045) 0:11:29.192 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.038) 0:11:29.231 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:38:37 -0400 (0:00:00.063) 0:11:29.295 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.039) 0:11:29.335 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.039) 0:11:29.374 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.032) 0:11:29.407 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
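
The crypttab verification reduces to counting the /etc/crypttab lines that reference the volume and asserting that the count matches the expectation, which is 0 here since the volume is unencrypted. A sketch, assuming the earlier cat /etc/crypttab result was registered as storage_test_crypttab (that name is an assumption):

    - name: "Sketch: collect crypttab entries that reference the volume"
      set_fact:
        _storage_test_crypttab_entries: "{{ storage_test_crypttab.stdout_lines
          | select('search', '/dev/mapper/foo-test1') | list }}"

    - name: "Sketch: expect zero entries for this unencrypted volume"
      assert:
        that: _storage_test_crypttab_entries | length == 0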

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.039) 0:11:29.374 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.032) 0:11:29.407 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.031) 0:11:29.439 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.035) 0:11:29.474 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false }

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.044) 0:11:29.518 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false }

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.056) 0:11:29.575 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.124) 0:11:29.699 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.117) 0:11:29.816 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.053) 0:11:29.870 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.055) 0:11:29.925 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.052) 0:11:29.977 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check if VDO deduplication is off - 2] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.050) 0:11:30.028 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check if VDO deduplication is on - 2] ************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.049) 0:11:30.078 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.053) 0:11:30.131 ********
ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false }

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Wednesday 30 July 2025 21:38:38 -0400 (0:00:00.049) 0:11:30.181 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13

TASK [Get stratis pool information] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.136) 0:11:30.317 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Print script output] *****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.053) 0:11:30.370 ********
skipping: [managed-node13] => {}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.053) 0:11:30.424 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Verify that the pools was created] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.054) 0:11:30.478 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.050) 0:11:30.528 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.051) 0:11:30.580 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.049) 0:11:30.629 ********
ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false }

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.050) 0:11:30.679 ********
ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false }

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.120) 0:11:30.800 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.109) 0:11:30.909 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false }

TASK [Run test verify for storage_test_volume_subset] **************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.049) 0:11:30.959 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13
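
Each entry in _storage_volume_tests drives one include, which is why eight test-verify-volume-*.yml files are pulled in above; the driving loop presumably looks like the following sketch (the loop variable name is an assumption):

    - name: "Sketch: include one verification file per subset"
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset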
0:11:31.147 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.058) 0:11:31.206 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:38:39 -0400 (0:00:00.063) 0:11:31.270 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.054) 0:11:31.324 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.049) 0:11:31.374 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.041) 0:11:31.416 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.037) 0:11:31.454 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.038) 0:11:31.492 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.042) 0:11:31.535 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.052) 0:11:31.587 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.049) 0:11:31.636 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.053) 0:11:31.690 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext3 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.084) 0:11:31.775 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.053) 0:11:31.828 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.052) 0:11:31.881 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.042) 0:11:31.923 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.041) 0:11:31.964 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
21:38:40 -0400 (0:00:00.035) 0:11:31.999 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.067) 0:11:32.067 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:38:40 -0400 (0:00:00.071) 0:11:32.139 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925908.3350883, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925908.3350883, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 224647, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925908.3350883, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.369) 0:11:32.508 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.065) 0:11:32.574 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.050) 0:11:32.624 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.063) 0:11:32.687 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.055) 0:11:32.743 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.050) 0:11:32.793 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.061) 0:11:32.855 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:38:41 -0400 (0:00:00.048) 0:11:32.903 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.533) 0:11:33.437 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.033) 0:11:33.470 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.042) 0:11:33.512 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.069) 0:11:33.581 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.049) 0:11:33.631 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.049) 0:11:33.680 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:38:42 -0400 (0:00:00.050) 0:11:33.731 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.050) 0:11:33.782 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.049) 0:11:33.832 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.065) 0:11:33.897 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.060) 0:11:33.958 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.056) 0:11:34.015 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.049) 0:11:34.064 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.053) 0:11:34.118 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.052) 0:11:34.170 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.052) 0:11:34.223 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:38:42 -0400 (0:00:00.050) 0:11:34.273 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.050) 0:11:34.324 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.051) 0:11:34.376 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.056) 0:11:34.432 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.050) 0:11:34.482 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.053) 0:11:34.535 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.051) 0:11:34.587 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.050) 0:11:34.637 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.050) 0:11:34.687 ******** ok: 
[managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:38:43 -0400 (0:00:00.433) 0:11:35.121 ******** ok: [managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.330) 0:11:35.451 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.044) 0:11:35.496 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.040) 0:11:35.536 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.293) 0:11:35.829 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.037) 0:11:35.867 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.038) 0:11:35.906 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.037) 0:11:35.943 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.037) 0:11:35.981 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:38:44 -0400 
(0:00:00.040) 0:11:36.021 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.035) 0:11:36.057 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.034) 0:11:36.092 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.037) 0:11:36.129 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.035) 0:11:36.164 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.035) 0:11:36.200 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.035) 0:11:36.235 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:38:44 -0400 (0:00:00.035) 0:11:36.270 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.035) 0:11:36.305 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.037) 0:11:36.342 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:38:45 -0400 (0:00:00.034) 0:11:36.377 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.034) 0:11:36.412 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.033) 0:11:36.445 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.034) 0:11:36.480 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.034) 0:11:36.515 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } }
TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.041) 0:11:36.556 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" }
TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.037) 0:11:36.593 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed
TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.044) 0:11:36.638 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.017035", "end": "2025-07-30 21:38:45.594947", "rc": 0, "start": "2025-07-30 21:38:45.577912" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.309) 0:11:36.948 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false }
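NOTE: The lvs invocation above is how the test distinguishes a plain linear LV from a cached one: with --nameprefixes every field comes back as a shell-style LVM2_*=value pair, so LVM2_SEGTYPE can be matched directly. A minimal sketch of the same check in isolation (the lvs flags are the ones from the logged command; the task names are illustrative, not the test's own):

# Illustrative reproduction of the cache check above.
- name: Query LV segment type for foo/test1
  command: >-
    lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
    -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
  register: lv_info
  changed_when: false  # read-only query

- name: Assert the LV is linear (not cached)
  assert:
    that:
      - "'LVM2_SEGTYPE=linear' in lv_info.stdout"

TASK [Check segment type]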
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.041) 0:11:36.989 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.048) 0:11:37.037 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.035) 0:11:37.073 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.035) 0:11:37.108 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.037) 0:11:37.146 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.036) 0:11:37.182 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.032) 0:11:37.215 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.029) 0:11:37.244 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change volume size to 9g] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:224 Wednesday 30 July 2025 21:38:45 -0400 (0:00:00.032) 0:11:37.277 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.064) 0:11:37.341 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for 
managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.050) 0:11:37.392 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.040) 0:11:37.432 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.083) 0:11:37.515 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.034) 0:11:37.550 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.031) 0:11:37.581 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.032) 0:11:37.614 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.032) 0:11:37.647 ******** included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13
TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.076) 0:11:37.723 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.033) 0:11:37.756 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext3", "mount_point": "/opt/test1", "name": "test1", "size": "9g" } ] } ] }
TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.040) 0:11:37.797 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] }
TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.035) 0:11:37.832 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.031) 0:11:37.864 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.034) 0:11:37.899 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.032) 0:11:37.933 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.032) 0:11:37.966 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false }
TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.049) 0:11:38.015 ********
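NOTE: The storage_pools value shown above is what drives the resize that follows: the only change from the previous run is size: "9g" for the test1 volume (previously verified at 5 GiB). A minimal sketch of a play requesting the same state (the host pattern and play framing are illustrative; the variable layout mirrors the value logged above):

# Illustrative play; not the test playbook itself.
- hosts: managed-node13
  roles:
    - role: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            volumes:
              - name: test1
                size: "9g"          # only change from the previous run
                fs_type: ext3
                mount_point: /opt/test1

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: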
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:38:46 -0400 (0:00:00.035) 0:11:38.051 ******** changed: [managed-node13] => { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:38:51 -0400 (0:00:05.146) 0:11:43.197 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:38:51 -0400 (0:00:00.036) 0:11:43.234 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925910.097096, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1ff1203632e7c5aaed867a74eb25885b038df3b8", "ctime": 1753925910.094096, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, 
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925910.094096, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:38:52 -0400 (0:00:00.299) 0:11:43.533 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:38:52 -0400 (0:00:00.324) 0:11:43.858 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:38:52 -0400 (0:00:00.046) 0:11:43.905 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": 
null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:38:52 -0400 (0:00:00.054) 0:11:43.960 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:38:52 -0400 (0:00:00.054) 0:11:44.014 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:38:52 -0400 (0:00:00.047) 0:11:44.062 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:38:52 -0400 (0:00:00.035) 0:11:44.097 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:38:53 -0400 (0:00:00.464) 0:11:44.561 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext3', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': 
u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext3", "mount_info": { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" }
TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:38:53 -0400 (0:00:00.348) 0:11:44.910 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext3', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:38:53 -0400 (0:00:00.066) 0:11:44.976 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} }
TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:38:54 -0400 (0:00:00.486) 0:11:45.463 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }
TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:38:54 -0400 (0:00:00.338) 0:11:45.801 ********
TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:38:54 -0400 (0:00:00.032) 0:11:45.833 ******** ok: [managed-node13]
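NOTE: The role run is now complete: blivet reported the two actions "resize device" and "resize format", which amount to growing the LV and then the filesystem on it. For orientation, a hand-rolled equivalent outside the role (illustrative only; not what blivet executes internally, and it skips the safety checks the role performs) would be:

# Illustrative equivalents of the two blivet actions above.
- name: Grow the logical volume to 9g ("resize device")
  command: lvextend -L 9g /dev/foo/test1   # lvextend --resizefs would combine both steps

- name: Grow the ext3 filesystem to fill the LV ("resize format")
  command: resize2fs /dev/mapper/foo-test1

ext3 supports online growing, which is why the resize can happen while /opt/test1 stays mounted, as the unchanged mounts list above shows.

TASK [Verify role results - 8] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:238 Wednesday 30 July 2025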
21:38:55 -0400 (0:00:00.725) 0:11:46.559 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13
TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:38:55 -0400 (0:00:00.097) 0:11:46.656 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }
TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:38:55 -0400 (0:00:00.071) 0:11:46.728 ******** skipping: [managed-node13] => {}
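NOTE: The info structure collected next is blkid-style data (name, fstype, label, mountpoint, size, type, uuid per device) gathered by a helper in the test collection. A rough stand-in using stock tooling (illustrative; not the test's actual helper module, and using lsblk -P key="value" pairs because the util-linux on this CentOS 7 host predates lsblk --json):

# Rough stand-in for the test's block device info helper.
- name: Collect block device info
  command: lsblk -P -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
  register: blockdev_info
  changed_when: false  # read-only query

TASK [Collect info about the volumes.]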
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:38:55 -0400 (0:00:00.051) 0:11:46.779 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext3", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "9G", "type": "lvm", "uuid": "c458e499-36c0-4c93-be16-cb3e776cd12a" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "mYWJT1-FNeb-zbSL-ZvuM-scAE-drg0-x0zfjj" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:38:55 -0400 (0:00:00.357) 0:11:47.137 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002736", "end": "2025-07-30 21:38:56.117759", "rc": 0, "start": "2025-07-30 21:38:56.115023" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext3 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:38:56 -0400 (0:00:00.358) 0:11:47.496 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002442", "end": "2025-07-30 21:38:56.470693", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:38:56.468251" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:38:56 -0400 (0:00:00.348) 0:11:47.844 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:38:56 -0400 (0:00:00.109) 0:11:47.954 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:38:56 -0400 (0:00:00.054) 0:11:48.008 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.020196", "end": "2025-07-30 21:38:57.014852", "rc": 0, "start": "2025-07-30 21:38:56.994656" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.385) 0:11:48.393 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.069) 0:11:48.463 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.114) 0:11:48.577 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.064) 0:11:48.642 ******** ok: [managed-node13] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.370) 0:11:49.012 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.059) 0:11:49.072 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.060) 0:11:49.133 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.072) 0:11:49.205 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:38:57 -0400 (0:00:00.062) 0:11:49.268 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:38:58 -0400 (0:00:00.057) 0:11:49.325 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:38:58 -0400 (0:00:00.051) 0:11:49.377 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:38:58 -0400 (0:00:00.073) 0:11:49.450 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.278)       0:11:49.729 ********
skipping: [managed-node13] => (item=/dev/sda) => {"ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda"}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.116)       0:11:49.846 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.115)       0:11:49.961 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.054)       0:11:50.016 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.053)       0:11:50.069 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.053)       0:11:50.122 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.050)       0:11:50.173 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.053)       0:11:50.226 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Wednesday 30 July 2025  21:38:58 -0400 (0:00:00.051)       0:11:50.278 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.051)       0:11:50.330 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.051)       0:11:50.382 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.054)       0:11:50.436 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.051)       0:11:50.488 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null}, "changed": false}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.053)       0:11:50.542 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.112)       0:11:50.654 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.115)       0:11:50.770 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.053)       0:11:50.824 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.050)       0:11:50.874 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.051)       0:11:50.926 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.055)       0:11:50.981 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.054)       0:11:51.035 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.051)       0:11:51.087 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.049)       0:11:51.136 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Wednesday 30 July 2025  21:38:59 -0400 (0:00:00.115)       0:11:51.252 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.117)       0:11:51.370 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.043)       0:11:51.414 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.040)       0:11:51.454 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.035)       0:11:51.489 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_thin_status": null}, "changed": false}
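Every "Conditional result was False" above is deliberate: the verification files are written for the general case, and each task is gated with a when: clause on the pool's features, so MD-RAID, LVM-RAID, and thin-pool checks all skip for this plain LVM pool. A minimal sketch of the gating, assuming a variable like storage_test_pool holds the pool under test (names illustrative):

    - name: Get information about RAID (sketch of the gating seen above)
      command: cat /proc/mdstat            # illustrative info source
      register: storage_test_mdstat        # hypothetical register name
      changed_when: false
      when:
        - storage_test_pool.type == 'lvm'
        - storage_test_pool.raid_level     # empty/none here, so the task reports a skip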
TASK [Check member encryption] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.035)       0:11:51.525 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13

TASK [Set test variables] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.083)       0:11:51.609 ********
ok: [managed-node13] => {"ansible_facts": {"_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-"}, "changed": false}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.056)       0:11:51.665 ********
skipping: [managed-node13] => (item=/dev/sda) => {"_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False"}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.061)       0:11:51.727 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.104)       0:11:51.832 ********
ok: [managed-node13] => {"ansible_facts": {"_storage_test_crypttab_entries": []}, "changed": false}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.059)       0:11:51.892 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed
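The crypttab validation is count-based: the expected number of entries ("0" here, since this pool is not encrypted) is compared with the /etc/crypttab lines that match the pool member. A minimal sketch of the pair, assuming the file was registered earlier as storage_test_crypttab (the '^luks-' match pattern is illustrative; the real tasks match on the member device):

    - name: Set variables used by tests
      set_fact:
        _storage_test_crypttab_entries: "{{ storage_test_crypttab.stdout_lines | select('match', '^luks-') | list }}"

    - name: Check for /etc/crypttab entry
      assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int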
TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.060)       0:11:51.952 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.050)       0:11:52.003 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.048)       0:11:52.052 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.047)       0:11:52.099 ********
ok: [managed-node13] => {"ansible_facts": {"_storage_test_crypttab_entries": null}, "changed": false}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.040)       0:11:52.140 ********
ok: [managed-node13] => {"ansible_facts": {"_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null}, "changed": false}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.043)       0:11:52.183 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Wednesday 30 July 2025  21:39:00 -0400 (0:00:00.095)       0:11:52.278 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.085)       0:11:52.364 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.034)       0:11:52.398 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.033)       0:11:52.432 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.032)       0:11:52.464 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}
TASK [Check if VDO deduplication is off - 2] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.034)       0:11:52.499 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check if VDO deduplication is on - 2] ************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.037)       0:11:52.536 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.045)       0:11:52.582 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_vdo_status": null}, "changed": false}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.053)       0:11:52.635 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13

TASK [Get stratis pool information] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.209)       0:11:52.845 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Print script output] *****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.051)       0:11:52.896 ********
skipping: [managed-node13] => {}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.050)       0:11:52.947 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify that the pools was created] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.050)       0:11:52.997 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.051)       0:11:53.049 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.051)       0:11:53.100 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}
TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.054)       0:11:53.155 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_stratis_report": null}, "changed": false}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.051)       0:11:53.207 ********
ok: [managed-node13] => {"ansible_facts": {"__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": []}, "changed": false}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Wednesday 30 July 2025  21:39:01 -0400 (0:00:00.052)       0:11:53.260 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.107)       0:11:53.367 ********
ok: [managed-node13] => {"ansible_facts": {"_storage_test_volume_present": true, "_storage_volume_tests": ["mount", "fstab", "fs", "device", "encryption", "md", "size", "cache"]}, "changed": false}

TASK [Run test verify for storage_test_volume_subset] **************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.064)       0:11:53.432 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.267)       0:11:53.699 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_device_path": "/dev/mapper/foo-test1"}, "changed": false}
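_storage_volume_tests doubles as a work list: the eight "included:" lines above correspond one-to-one to its entries, which points to an include_tasks loop over the subset names. A sketch of that likely mechanism (the loop variable name is taken from the task title; treat the details as illustrative):

    - name: Run test verify for storage_test_volume_subset
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"   # mount, fstab, fs, device, encryption, md, size, cache
      loop_control:
        loop_var: storage_test_volume_subset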
TASK [Set some facts] **********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.061)       0:11:53.760 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0"}, "changed": false}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.063)       0:11:53.824 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.064)       0:11:53.889 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.060)       0:11:53.949 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.062)       0:11:54.011 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.047)       0:11:54.059 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.048)       0:11:54.108 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.043)       0:11:54.151 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.034)       0:11:54.185 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.032)       0:11:54.218 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null}, "changed": false}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Wednesday 30 July 2025  21:39:02 -0400 (0:00:00.036)       0:11:54.254 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": ["/dev/mapper/foo-test1 "], "storage_test_fstab_mount_options_matches": [" /opt/test1 ext3 defaults "], "storage_test_fstab_mount_point_matches": [" /opt/test1 "]}, "changed": false}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.087)       0:11:54.341 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.058)       0:11:54.400 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.052)       0:11:54.453 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.039)       0:11:54.493 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.050)       0:11:54.543 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null}, "changed": false}
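The fstab facts above are literally lists of regex matches against the file captured earlier; each assert then only compares a list length with its expected count. A minimal sketch of one such pair (the filter chain is illustrative, not the test's exact expression):

    - name: Set some variables for fstab checking
      set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines | select('match', storage_test_device_path ~ ' ') | list }}"

    - name: Verify that the device identifier appears in /etc/fstab
      assert:
        that:
          - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int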
TASK [Verify fs type] **********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.036)       0:11:54.579 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.045)       0:11:54.624 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.060)       0:11:54.685 ********
ok: [managed-node13] => {"changed": false, "stat": {"atime": 1753925931.7961895, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925931.7961895, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 224647, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925931.7961895, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false}}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.335)       0:11:55.020 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Verify the presence/absence of the device node - 2] **********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.047)       0:11:55.067 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.046)       0:11:55.114 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.061)       0:11:55.175 ********
ok: [managed-node13] => {"ansible_facts": {"st_volume_type": "lvm"}, "changed": false}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.057)       0:11:55.233 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}
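The device-node check above is a plain stat plus asserts on the returned flags; /dev/mapper/foo-test1 is a device-mapper symlink, which is why the result mixes "mimetype": "inode/symlink" with "isblk": true. A minimal sketch of the pair (register name illustrative):

    - name: See whether the device node is present
      stat:
        path: /dev/mapper/foo-test1
        follow: true                   # resolve the dm symlink to the block device
      register: storage_test_dev       # hypothetical register name

    - name: Verify the presence/absence of the device node
      assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk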
TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Wednesday 30 July 2025  21:39:03 -0400 (0:00:00.052)       0:11:55.285 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.065)       0:11:55.350 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.063)       0:11:55.413 ********
ok: [managed-node13] => {"changed": false, "rc": 0, "results": ["cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed"]}

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.558)       0:11:55.972 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.050)       0:11:56.023 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.047)       0:11:56.071 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.065)       0:11:56.136 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.051)       0:11:56.188 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.048)       0:11:56.237 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64
Wednesday 30 July 2025  21:39:04 -0400 (0:00:00.041)       0:11:56.278 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.040)       0:11:56.319 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.041)       0:11:56.360 ********
ok: [managed-node13] => {"ansible_facts": {"_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-"}, "changed": false}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.051)       0:11:56.412 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.039)       0:11:56.451 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.031)       0:11:56.483 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.106)       0:11:56.590 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.052)       0:11:56.643 ********
ok: [managed-node13] => {"ansible_facts": {"_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null}, "changed": false}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.052)       0:11:56.696 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}
TASK [Set active devices regex] ************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.050)       0:11:56.747 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.050)       0:11:56.797 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.049)       0:11:56.847 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.053)       0:11:56.900 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.051)       0:11:56.952 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.049)       0:11:57.001 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.049)       0:11:57.050 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.049)       0:11:57.099 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.050)       0:11:57.150 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Wednesday 30 July 2025  21:39:05 -0400 (0:00:00.052)       0:11:57.203 ********
ok: [managed-node13] => {"bytes": 9663676416, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB"}
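Size verification normalizes every human-readable size to bytes before comparing, which is why the actual size above and the requested size in the next task both resolve to the integer 9663676416 (9 GiB). A sketch using the stock human_to_bytes filter (the run itself evidently uses a bundled module, given the lvm/parted fields in the result):

    - name: Parse the requested size of the volume
      set_fact:
        storage_test_requested_size: "{{ '9g' | human_to_bytes }}"   # -> 9663676416

    - name: Assert expected size is actual size
      assert:
        that:
          - storage_test_actual_size.bytes | int == storage_test_requested_size | int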
TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Wednesday 30 July 2025  21:39:06 -0400 (0:00:00.352)       0:11:57.555 ********
ok: [managed-node13] => {"bytes": 9663676416, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB"}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Wednesday 30 July 2025  21:39:06 -0400 (0:00:00.356)       0:11:57.911 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_expected_size": "9663676416"}, "changed": false}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Wednesday 30 July 2025  21:39:06 -0400 (0:00:00.069)       0:11:57.980 ********
ok: [managed-node13] => {"storage_test_expected_size": "9663676416"}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Wednesday 30 July 2025  21:39:06 -0400 (0:00:00.058)       0:11:58.039 ********
ok: [managed-node13] => {"bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB"}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.343)       0:11:58.382 ********
skipping: [managed-node13] => {}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.043)       0:11:58.425 ********
skipping: [managed-node13] => {}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.041)       0:11:58.467 ********
skipping: [managed-node13] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.040)       0:11:58.508 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.041)       0:11:58.550 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.041)       0:11:58.591 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.039)       0:11:58.630 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.043)       0:11:58.673 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.040)       0:11:58.713 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.038)       0:11:58.752 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.036)       0:11:58.789 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.034)       0:11:58.823 ********
skipping: [managed-node13] => {}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.034)       0:11:58.858 ********
skipping: [managed-node13] => {}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.036)       0:11:58.895 ********
skipping: [managed-node13] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.035)       0:11:58.930 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}
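All the skipped branches above exist for volumes sized as a percentage of the pool (a size like "60%"); only then is the expected size derived from the pool size instead of a literal. A sketch of that derivation, assuming hypothetical inputs storage_test_volume and storage_test_pool_size:

    - name: Calculate the expected size based on pool size and percentage value
      set_fact:
        storage_test_expected_size: "{{ (storage_test_pool_size.bytes * (storage_test_volume.size | regex_replace('%$', '') | int) / 100) | int }}"
      when: storage_test_volume.size is match('^[0-9]+%$')   # false for this 9g volume, hence the skips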
TASK [Calculate the expected size based on pool size and percentage value - 2] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.036)       0:11:58.966 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.034)       0:11:59.001 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.035)       0:11:59.036 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.034)       0:11:59.071 ********
skipping: [managed-node13] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.037)       0:11:59.108 ********
ok: [managed-node13] => {"storage_test_actual_size": {"bytes": 9663676416, "changed": false, "failed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB"}}

TASK [Show expected size - 2] **************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.036)       0:11:59.145 ********
ok: [managed-node13] => {"storage_test_expected_size": "9663676416"}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.036)       0:11:59.181 ********
ok: [managed-node13] => {"changed": false}

MSG: All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Wednesday 30 July 2025  21:39:07 -0400 (0:00:00.043)       0:11:59.225 ********
ok: [managed-node13] => {"changed": false, "cmd": ["lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1"], "delta": "0:00:00.018329", "end": "2025-07-30 21:39:08.182506", "rc": 0, "start": "2025-07-30 21:39:08.164177"}

STDOUT:

LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Wednesday 30 July 2025  21:39:08 -0400 (0:00:00.308)       0:11:59.533 ********
ok: [managed-node13] => {"ansible_facts": {"storage_test_lv_segtype": ["linear"]}, "changed": false}
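Because lvs was run with --nameprefixes --unquoted, every token of its STDOUT is a KEY=VALUE pair, so the segment type can be recovered with plain string filters before the assert that follows. A minimal sketch (register name illustrative):

    - name: Set LV segment type
      set_fact:
        storage_test_lv_segtype: "{{ storage_test_lvs.stdout.split() | select('match', '^LVM2_SEGTYPE=') | map('regex_replace', '^LVM2_SEGTYPE=', '') | list }}"

    - name: Check segment type
      assert:
        that:
          - storage_test_lv_segtype[0] == 'linear'   # 'cache' here would mean an LV cache is attached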
TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.042) 0:11:59.617 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.034) 0:11:59.652 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.036) 0:11:59.689 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.035) 0:11:59.724 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.034) 0:11:59.759 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.032) 0:11:59.792 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.030) 0:11:59.822 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change volume size to before size 5g] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:241 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.033) 0:11:59.855 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.067) 0:11:59.923 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13
TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.050) 0:11:59.973 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.040) 0:12:00.013 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.085) 0:12:00.099 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.033) 0:12:00.132 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.033) 0:12:00.165 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.032) 0:12:00.198 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:39:08 -0400 (0:00:00.032) 0:12:00.231 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13
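
This play re-applies the storage role to shrink the volume from 9 GiB back to 5 GiB. The requested state is the storage_pools value echoed by the Show storage_pools task just below; reconstructed as a playbook invocation it would look roughly like this sketch (the input format is the role's documented one, and the concrete values are taken from this log):

    - hosts: all
      roles:
        - role: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                type: lvm
                disks:
                  - sda
                volumes:
                  - name: test1
                    size: "5g"
                    fs_type: ext3
                    mount_point: /opt/test1

Because the role is declarative, the same invocation covers grow and shrink: blivet diffs this spec against the current device state and, for a shrink, emits a "resize format" action (shrink the ext3 filesystem first) followed by "resize device" (shrink the LV), as shown in the Manage the pools and volumes result below.
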
TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.124) 0:12:00.356 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.035) 0:12:00.391 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext3", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.040) 0:12:00.431 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.038) 0:12:00.470 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.032) 0:12:00.503 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.033) 0:12:00.537 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.031) 0:12:00.568 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.033) 0:12:00.602 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.047) 0:12:00.649 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:39:09 -0400 (0:00:00.029) 0:12:00.679 ******** changed: [managed-node13] => { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" }, { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:39:14 -0400 (0:00:05.474) 0:12:06.154 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:39:14 -0400 (0:00:00.036) 0:12:06.190 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925910.097096, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1ff1203632e7c5aaed867a74eb25885b038df3b8", "ctime": 1753925910.094096, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, 
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925910.094096, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:39:15 -0400 (0:00:00.301) 0:12:06.491 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:39:15 -0400 (0:00:00.341) 0:12:06.832 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:39:15 -0400 (0:00:00.039) 0:12:06.872 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" }, { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": 
null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:39:15 -0400 (0:00:00.055) 0:12:06.928 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:39:15 -0400 (0:00:00.046) 0:12:06.974 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:39:15 -0400 (0:00:00.037) 0:12:07.011 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:39:15 -0400 (0:00:00.031) 0:12:07.043 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:39:16 -0400 (0:00:00.458) 0:12:07.501 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext3', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': 
u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext3", "mount_info": { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:39:16 -0400 (0:00:00.366) 0:12:07.867 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext3', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext3", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:39:16 -0400 (0:00:00.061) 0:12:07.929 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:39:17 -0400 (0:00:00.465) 0:12:08.394 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:39:17 -0400 (0:00:00.344) 0:12:08.739 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:39:17 -0400 (0:00:00.032) 0:12:08.771 ******** ok: [managed-node13] TASK [Verify role results - 9] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:255 Wednesday 30 July 2025 21:39:18 -0400 (0:00:00.727) 0:12:09.499 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13
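
From here the test re-collects the real device state (blkid/lsblk info, /etc/fstab, /etc/crypttab) and asserts that it matches the spec that was just applied; note that the Collect info output below now reports /dev/mapper/foo-test1 at 5G. A standalone spot-check of the same property might look like the following sketch (illustrative only, not part of the test suite; foo/test1 matches the pool and volume names used throughout this log):

    - name: Read the LV size in bytes
      ansible.builtin.command:
        cmd: lvs --noheadings --nosuffix --units b -o lv_size foo/test1
      register: lv_size
      changed_when: false

    - name: Assert the volume is now 5 GiB
      ansible.builtin.assert:
        that:
          - (lv_size.stdout | trim | int) == 5 * 1024 * 1024 * 1024
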
TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:39:18 -0400 (0:00:00.105) 0:12:09.605 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:39:18 -0400 (0:00:00.073) 0:12:09.679 ******** skipping: [managed-node13] => {}
TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:39:18 -0400 (0:00:00.050) 0:12:09.730 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext3", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "c458e499-36c0-4c93-be16-cb3e776cd12a" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "mYWJT1-FNeb-zbSL-ZvuM-scAE-drg0-x0zfjj" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:39:18 -0400 (0:00:00.355) 0:12:10.085 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002626", "end": "2025-07-30 21:39:19.055914", "rc": 0, "start": "2025-07-30 21:39:19.053288" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext3 defaults 0 0
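
The tail of that fstab dump is the role's work: the "# system_role:storage" fingerprint was stamped by the Add fingerprint task earlier in the run, and the final line is the mount entry for the resized volume, laid out in the standard fstab(5) field order (device, mount point, fs type, options, dump, passno). Each entry in the role's "mounts" result maps onto Ansible's mount module field for field; a minimal equivalent, assuming the ansible.posix collection is available (the role ships its own mount task, so this is only an illustration):

    - name: Ensure the test volume is mounted
      ansible.posix.mount:
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: ext3
        opts: defaults
        dump: "0"
        passno: "0"
        state: mounted
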
TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:39:19 -0400 (0:00:00.346) 0:12:10.432 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002456", "end": "2025-07-30 21:39:19.406057", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:39:19.403601" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:39:19 -0400 (0:00:00.349) 0:12:10.782 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:39:19 -0400 (0:00:00.113) 0:12:10.896 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:39:19 -0400 (0:00:00.053) 0:12:10.949 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.017673", "end": "2025-07-30 21:39:19.948118", "rc": 0, "start": "2025-07-30 21:39:19.930445" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.373) 0:12:11.323 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.140) 0:12:11.464 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.124) 0:12:11.588 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.082) 0:12:11.671 ******** ok: [managed-node13] => (item=/dev/sda) => {
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.351) 0:12:12.022 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.058) 0:12:12.081 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.063) 0:12:12.144 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.067) 0:12:12.211 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:39:20 -0400 (0:00:00.059) 0:12:12.271 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.064) 0:12:12.335 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.056) 0:12:12.392 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.074) 0:12:12.466 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.285) 0:12:12.752 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.059) 0:12:12.811 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.112) 0:12:12.924 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.053) 0:12:12.978 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.051) 0:12:13.029 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.053) 0:12:13.081 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.053) 0:12:13.135 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.054) 0:12:13.190 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.050) 0:12:13.240 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:39:21 -0400 (0:00:00.052) 0:12:13.292 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.052) 0:12:13.345 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.051) 0:12:13.397 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.053) 0:12:13.451 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.051) 0:12:13.502 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.115) 0:12:13.618 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.128) 0:12:13.746 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.053) 0:12:13.800 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.051) 0:12:13.852 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.051) 0:12:13.904 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.050) 0:12:13.955 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.050) 0:12:14.006 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.053) 0:12:14.059 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.051) 0:12:14.110 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:39:22 -0400 (0:00:00.111) 0:12:14.222 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.104) 0:12:14.327 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.043) 0:12:14.370 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.041) 0:12:14.412 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.038) 0:12:14.450 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.034) 0:12:14.484 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.084) 0:12:14.569 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.055) 0:12:14.624 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.062) 0:12:14.686 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.114) 0:12:14.801 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.060) 0:12:14.861 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.062) 0:12:14.924 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.052) 0:12:14.977 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.050) 0:12:15.027 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.051) 0:12:15.078 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.042) 0:12:15.120 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:39:23 -0400 (0:00:00.103) 0:12:15.224 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.080) 0:12:15.304 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.074) 0:12:15.379 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.035) 0:12:15.415 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.034) 0:12:15.450 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.032) 0:12:15.482 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.038) 0:12:15.521 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.048) 0:12:15.570 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.057) 0:12:15.627 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.055) 0:12:15.682 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.137) 0:12:15.819 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.058) 0:12:15.878 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.050) 0:12:15.928 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pool was created] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.049) 0:12:15.978 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.050) 0:12:16.029 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:39:24 -0400
(0:00:00.050) 0:12:16.079 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.048) 0:12:16.128 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.053) 0:12:16.181 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:39:24 -0400 (0:00:00.048) 0:12:16.230 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.102) 0:12:16.332 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.060) 0:12:16.392 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.259) 
0:12:16.651 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.067) 0:12:16.718 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.067) 0:12:16.786 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.070) 0:12:16.856 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.065) 0:12:16.921 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.055) 0:12:16.977 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.047) 0:12:17.025 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.050) 0:12:17.075 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.041) 0:12:17.116 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.033) 0:12:17.150 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.032) 0:12:17.183 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:39:25 -0400 (0:00:00.039) 0:12:17.222 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext3 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.082) 0:12:17.305 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.054) 0:12:17.359 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.049) 0:12:17.409 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.041) 0:12:17.450 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.045) 0:12:17.496 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
21:39:26 -0400 (0:00:00.033) 0:12:17.529 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.046) 0:12:17.576 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.069) 0:12:17.645 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925954.7482886, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925954.7482886, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 224647, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925954.7482886, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.336) 0:12:17.982 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.048) 0:12:18.031 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.049) 0:12:18.081 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.062) 0:12:18.143 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.056) 0:12:18.200 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
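
The device checks hinge on a stat of the mapper path; note the node resolves to a block device (isblk: true) even though /dev/mapper/foo-test1 is itself a symlink to the underlying dm node. A rough equivalent of the probe-and-assert pair, assuming the device path from this run:

    - name: See whether the device node is present
      stat:
        path: /dev/mapper/foo-test1
        follow: true  # resolve the /dev/mapper symlink to the dm node
      register: storage_test_dev

    - name: Verify the presence/absence of the device node
      assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk
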
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:39:26 -0400 (0:00:00.050) 0:12:18.251 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.063) 0:12:18.315 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.051) 0:12:18.366 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.549) 0:12:18.916 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.053) 0:12:18.969 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.054) 0:12:19.023 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.071) 0:12:19.095 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.053) 0:12:19.148 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.054) 0:12:19.202 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:39:27 -0400 (0:00:00.045) 0:12:19.248 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:39:27 -0400 (0:00:00.041) 0:12:19.290 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.042) 0:12:19.332 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.054) 0:12:19.386 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.039) 0:12:19.426 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.032) 0:12:19.459 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.032) 0:12:19.491 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.040) 0:12:19.532 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.050) 0:12:19.582 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.055) 0:12:19.638 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.053) 0:12:19.691 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.047) 0:12:19.739 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.052) 0:12:19.792 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.051) 0:12:19.843 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.050) 0:12:19.894 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.054) 0:12:19.948 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.055) 0:12:20.003 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.052) 0:12:20.056 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:39:28 -0400 (0:00:00.050) 0:12:20.107 ******** ok: 
[managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:39:29 -0400 (0:00:00.356) 0:12:20.463 ******** ok: [managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:39:29 -0400 (0:00:00.356) 0:12:20.819 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:39:29 -0400 (0:00:00.069) 0:12:20.889 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:39:29 -0400 (0:00:00.057) 0:12:20.946 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:39:29 -0400 (0:00:00.345) 0:12:21.291 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.049) 0:12:21.341 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.044) 0:12:21.386 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.038) 0:12:21.424 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.037) 0:12:21.462 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:39:30 -0400 
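
Both the actual and the requested size parse to the same 5368709120 bytes (5 GiB), which is why the later "Assert expected size is actual size" task passes. The role uses its own size-parsing helper for these conversions; purely as an illustration, stock Ansible reaches the same figure with the human_to_bytes filter:

    - name: Establish base value for expected size
      set_fact:
        storage_test_expected_size: "{{ '5G' | human_to_bytes }}"  # 5 * 1024**3 = 5368709120
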
(0:00:00.035) 0:12:21.497 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.034) 0:12:21.532 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.036) 0:12:21.569 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.036) 0:12:21.605 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.035) 0:12:21.640 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.034) 0:12:21.675 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.035) 0:12:21.710 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.034) 0:12:21.744 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.034) 0:12:21.779 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.036) 0:12:21.815 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:39:30 -0400 (0:00:00.034) 0:12:21.850 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.034) 0:12:21.885 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.034) 0:12:21.919 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.034) 0:12:21.954 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.036) 0:12:21.991 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.040) 0:12:22.032 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.035) 0:12:22.067 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:39:30 -0400 (0:00:00.044) 0:12:22.111 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.015792", "end": "2025-07-30 21:39:31.066949", "rc": 0, "start": "2025-07-30 21:39:31.051157" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.309) 0:12:22.421 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] 
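
The cache verification shells out to lvs and keys off LVM2_SEGTYPE: a linear segment type means the LV is uncached, so the cache-size tasks that follow are skipped. The probe can be reproduced as a task; the command line below is taken verbatim from the output above:

    - name: Get information about the LV
      command: >-
        lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
        -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
      register: lvs_out
      changed_when: false  # a read-only query; never report a change
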
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.042) 0:12:22.463 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.042) 0:12:22.506 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.035) 0:12:22.542 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.034) 0:12:22.576 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.036) 0:12:22.612 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.037) 0:12:22.650 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.033) 0:12:22.684 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.029) 0:12:22.713 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up - 2] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:259 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.032) 0:12:22.745 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.122) 0:12:22.868 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for 
managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.052) 0:12:22.921 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.040) 0:12:22.961 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.086) 0:12:23.048 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.036) 0:12:23.084 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.032) 0:12:23.117 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.033) 0:12:23.150 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.033) 0:12:23.184 ******** included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:39:31 -0400 (0:00:00.080) 0:12:23.264 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.034) 0:12:23.299 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "state": "absent", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.042) 0:12:23.341 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.038) 0:12:23.380 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.033) 0:12:23.414 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.034) 0:12:23.448 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.036) 0:12:23.485 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.033) 0:12:23.518 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.050) 0:12:23.568 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: 
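
The "Clean up - 2" play invokes the role with the pool marked state: absent, which is what drives the destroy actions in the next task. A sketch of the invocation implied by the storage_pools value printed above (include_role here is an assumption; the test may pull the role in differently):

    - name: Clean up the pool created for this test
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: [sda]
            state: absent  # remove the pool and everything in it
            volumes:
              - name: test1
                size: 5g
                mount_point: /opt/test1
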
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:39:32 -0400 (0:00:00.031) 0:12:23.599 ******** changed: [managed-node13] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext3", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:39:37 -0400 (0:00:04.786) 0:12:28.386 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:39:37 -0400 (0:00:00.033) 0:12:28.419 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925910.097096, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1ff1203632e7c5aaed867a74eb25885b038df3b8", "ctime": 1753925910.094096, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, 
"isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925910.094096, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:39:37 -0400 (0:00:00.299) 0:12:28.719 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:39:37 -0400 (0:00:00.297) 0:12:29.016 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:39:37 -0400 (0:00:00.031) 0:12:29.047 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "ext3" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext3", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] 
} } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:39:37 -0400 (0:00:00.046) 0:12:29.094 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:39:37 -0400 (0:00:00.041) 0:12:29.135 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:39:37 -0400 (0:00:00.039) 0:12:29.174 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'state': u'absent', u'fstype': u'ext3', u'path': u'/opt/test1'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext3", "mount_info": { "fstype": "ext3", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:39:38 -0400 (0:00:00.305) 0:12:29.480 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: 
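
Tearing down the pool also retires its fstab entry: the task above iterates over the mounts list returned by blivet and hands each record, with state: absent, to the mount module, which unmounts the path and strips the line from /etc/fstab. Roughly as follows (the loop variable name matches the ansible_loop_var in the output; the source list name is an assumption):

    - name: Remove obsolete mounts
      mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        state: absent  # unmount and drop the fstab line
      loop: "{{ blivet_output.mounts }}"  # assumed registered variable
      loop_control:
        loop_var: mount_info
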
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:39:38 -0400 (0:00:00.440) 0:12:29.921 ******** TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:39:38 -0400 (0:00:00.041) 0:12:29.962 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:39:38 -0400 (0:00:00.041) 0:12:30.004 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:39:39 -0400 (0:00:00.425) 0:12:30.430 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:39:39 -0400 (0:00:00.295) 0:12:30.725 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:39:39 -0400 (0:00:00.031) 0:12:30.756 ******** ok: [managed-node13] TASK [Verify role results - 10] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:272 Wednesday 30 July 2025 21:39:40 -0400 (0:00:00.634) 0:12:31.391 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:39:40 -0400 (0:00:00.062) 0:12:31.453 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, 
"encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext3", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:39:40 -0400 (0:00:00.065) 0:12:31.519 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:39:40 -0400 (0:00:00.040) 0:12:31.560 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 
30 July 2025 21:39:41 -0400 (0:00:01.348) 0:12:32.909 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002783", "end": "2025-07-30 21:39:41.882385", "rc": 0, "start": "2025-07-30 21:39:41.879602" }
STDOUT:
# system_role:storage
#
# /etc/fstab
# Created by anaconda on Thu Jun 20 10:23:46 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk'
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info
#
UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:39:41 -0400 (0:00:00.346) 0:12:33.256 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002463", "end": "2025-07-30 21:39:42.237637", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:39:42.235174" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.351) 0:12:33.608 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.109) 0:12:33.718 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.050) 0:12:33.768 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.049) 0:12:33.818 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
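The pool verification running here checks the outcome of the removal whose spec was printed above (pool "foo" with "state": "absent"). For orientation, a minimal sketch of what such an invocation looks like, reconstructed from the printed spec rather than copied from the test source:

    # Sketch only; values taken from the pool spec printed earlier in this run.
    - name: Remove the pool and its volume
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: ["sda"]
            state: absent
            volumes:
              - name: test1
                size: "5g"
                fs_type: ext3
                mount_point: /opt/test1

Note the "failed_when_result": false on the /etc/crypttab read above: the task runs with failed_when: false so that a missing or empty crypttab cannot fail the play; later tasks inspect the registered output instead.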
TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.048) 0:12:33.867 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.117) 0:12:33.984 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.064) 0:12:34.049 ******** TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.046) 0:12:34.096 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.059) 0:12:34.156 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.061) 0:12:34.218 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:39:42 -0400 (0:00:00.062) 0:12:34.280 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.060) 0:12:34.341 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.059) 0:12:34.401 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.054) 0:12:34.455 ******** TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.046) 0:12:34.501 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.266) 0:12:34.768 ******** TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.047) 0:12:34.816 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.107) 0:12:34.923 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.053) 0:12:34.977 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.050) 0:12:35.028 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.055) 0:12:35.083 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.051) 0:12:35.135 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.050) 0:12:35.185 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] 
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.055) 0:12:35.241 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:39:43 -0400 (0:00:00.049) 0:12:35.291 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.049) 0:12:35.341 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.056) 0:12:35.397 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.050) 0:12:35.448 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.051) 0:12:35.499 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.114) 0:12:35.613 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.114) 0:12:35.728 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.052) 0:12:35.781 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.051) 0:12:35.833 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.051) 0:12:35.884 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.049) 0:12:35.934 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.053) 0:12:35.987 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.047) 0:12:36.035 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.050) 0:12:36.086 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:39:44 -0400 (0:00:00.115) 0:12:36.201 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.188) 0:12:36.390 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.054) 0:12:36.444 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.053) 0:12:36.497 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.050) 0:12:36.548 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.050) 0:12:36.599 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.123) 0:12:36.723 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.061) 0:12:36.784 ******** TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.048) 0:12:36.833 ******** TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.043) 0:12:36.876 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.051) 0:12:36.927 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.124) 0:12:37.052 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.104) 0:12:37.156 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.044) 0:12:37.201 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.051) 0:12:37.253 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:39:45 -0400 (0:00:00.040) 0:12:37.293 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO compression is off] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.033) 0:12:37.326 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO compression is on] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.032) 0:12:37.359 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.032) 0:12:37.391 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.042) 0:12:37.434 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.138) 0:12:37.573 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:39:46 -0400
(0:00:00.052) 0:12:37.625 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.048) 0:12:37.673 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pool was created] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.051) 0:12:37.725 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.052) 0:12:37.778 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.048) 0:12:37.827 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.050) 0:12:37.877 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.040) 0:12:37.918 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.041) 0:12:37.960 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.080) 0:12:38.041 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false }
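The eight per-volume checks are dispatched by looping over the _storage_volume_tests list just set above. A sketch of the likely mechanism (the loop variable name is inferred from the task name in the log; this is not quoted from the test source):

    # Sketch: one include per subset, matching the eight files included next.
    - name: Run test verify for storage_test_volume_subset
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset

This accounts for the eight includes that follow (mount, fstab, fs, device, encryption, md, size, cache).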
TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.039) 0:12:38.080 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.173) 0:12:38.253 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:39:46 -0400 (0:00:00.038) 0:12:38.292 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.048) 0:12:38.340 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.045) 0:12:38.386 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.045) 0:12:38.431 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.041) 0:12:38.473 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions]
************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.040) 0:12:38.514 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.040) 0:12:38.555 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.047) 0:12:38.602 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.048) 0:12:38.650 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.050) 0:12:38.701 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.049) 0:12:38.750 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.087) 0:12:38.838 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.050) 0:12:38.888 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed
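The match lists above are empty and the expected counts are "0", which is exactly what a removed volume should produce. As a hypothetical illustration of how such lists can be derived from the registered "cat /etc/fstab" output (the variable name storage_test_fstab is taken from the cleanup task later in this log; the filter expressions are illustrative, not quoted from the test source):

    # Sketch: build match lists from the registered fstab contents.
    - name: Set some variables for fstab checking
      set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines | select('search', '/dev/mapper/foo-test1') | list }}"
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout_lines | select('search', '/opt/test1') | list }}"

The assertions then compare each list's length against the expected count (0 here, since the volume was removed).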
TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.060) 0:12:38.949 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.050) 0:12:39.000 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.063) 0:12:39.064 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.051) 0:12:39.115 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.054) 0:12:39.170 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:39:47 -0400 (0:00:00.052) 0:12:39.223 ******** ok: [managed-node13] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.355) 0:12:39.578 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.116) 0:12:39.695 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.043) 0:12:39.767 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)]
*************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.043) 0:12:39.810 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.054) 0:12:39.865 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.049) 0:12:39.914 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.041) 0:12:39.956 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:39:48 -0400 (0:00:00.051) 0:12:40.007 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.538) 0:12:40.545 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.051) 0:12:40.597 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.049) 0:12:40.647 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.043) 0:12:40.690 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.049) 0:12:40.739 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.048) 0:12:40.788 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.040) 0:12:40.829 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.045) 0:12:40.875 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.045) 0:12:40.920 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.060) 0:12:40.981 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.057) 0:12:41.038 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.048) 0:12:41.087 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.048) 0:12:41.136 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.052) 0:12:41.189 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.051) 0:12:41.241 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:39:49 -0400 (0:00:00.049) 0:12:41.290 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.052) 0:12:41.343 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.049) 0:12:41.392 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.049) 0:12:41.442 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.056) 0:12:41.498 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.050) 0:12:41.549 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.051) 0:12:41.601 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.052) 0:12:41.653 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.050) 0:12:41.703 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.048) 0:12:41.751 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.051) 0:12:41.803 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.048) 0:12:41.852 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.047) 0:12:41.899 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.053) 0:12:41.952 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.048) 0:12:42.001 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.048) 0:12:42.050 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.051) 0:12:42.102 ******** skipping: [managed-node13] => {}
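The expected size shown a few tasks above, "5368709120", is simply "5g" converted with binary units: 5 * 1024^3 = 5 * 1073741824 = 5368709120 bytes. A one-task sketch of the same conversion, assuming the ansible.builtin.human_to_bytes filter (which uses 1024-based units; whether the test itself uses this filter is not visible in this log):

    # Sketch: "5g" -> 5368709120 bytes (5 * 1024^3).
    - name: Show how "5g" becomes the expected byte count
      debug:
        msg: "{{ '5g' | human_to_bytes }}"

The size tasks that follow are the thin-pool branch of the calculation; they are all skipped here because this volume is not thin-provisioned ("thin": false in the spec).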
TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.048) 0:12:42.151 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.048) 0:12:42.199 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:39:50 -0400 (0:00:00.055) 0:12:42.255 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.053) 0:12:42.308 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.057) 0:12:42.366 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.056) 0:12:42.423 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.056) 0:12:42.479 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.054) 0:12:42.533 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.054) 0:12:42.588 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.048) 0:12:42.637 ******** skipping: [managed-node13] => {} TASK [Show test volume
size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.042) 0:12:42.679 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.048) 0:12:42.727 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.045) 0:12:42.772 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.035) 0:12:42.808 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.034) 0:12:42.843 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.034) 0:12:42.877 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.035) 0:12:42.913 ******** ok: [managed-node13] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.039) 0:12:42.953 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.035) 0:12:42.988 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:39:51 
-0400 (0:00:00.033) 0:12:43.022 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.032) 0:12:43.055 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.033) 0:12:43.088 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.032) 0:12:43.120 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.034) 0:12:43.155 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.075) 0:12:43.231 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:39:51 -0400 (0:00:00.033) 0:12:43.264 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.032) 0:12:43.297 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.033) 0:12:43.330 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.029) 0:12:43.360 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create a LVM logical volume with for ext2 FS size 5g] ******************** 
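The play that starts here repeats the role-invocation pattern used throughout this run: the test hands the storage role a storage_pools spec describing an LVM pool "foo" on /dev/sda with a single 5g ext2 volume "test1" mounted at /opt/test1 (the spec is echoed verbatim by the "Show storage_pools" task below). A minimal sketch of such an invocation, reconstructed from the values visible in this log rather than from the test file itself:

  - name: Create an LVM logical volume for an ext2 FS, size 5g (sketch)
    include_role:
      name: fedora.linux_system_roles.storage
    vars:
      storage_pools:
        - name: foo
          type: lvm
          disks:
            - sda
          volumes:
            - name: test1
              size: "5g"
              fs_type: ext2
              mount_point: /opt/test1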
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:279 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.038) 0:12:43.398 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.055) 0:12:43.454 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.049) 0:12:43.504 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.039) 0:12:43.543 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.082) 0:12:43.626 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.032) 0:12:43.659 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.031) 0:12:43.691 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in 
testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.031) 0:12:43.723 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.031) 0:12:43.754 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.076) 0:12:43.831 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.032) 0:12:43.863 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext2", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.040) 0:12:43.904 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.037) 0:12:43.941 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.035) 0:12:43.977 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.034) 0:12:44.011 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.033) 0:12:44.044 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 
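A note on the size values the verification tasks compare: the requested size "5g" in the pool spec above is interpreted in binary units, so the expected size printed by the earlier "Show expected size" tasks ("storage_test_expected_size": "5368709120") is simply 5 GiB = 5 * 1024^3 = 5368709120 bytes. The same conversion can be reproduced with Ansible's human_to_bytes filter, which treats G as GiB; a one-task sketch (the task name is illustrative):

  - name: Show the requested size in bytes (sketch)
    debug:
      msg: "{{ '5g' | human_to_bytes }}"  # -> 5368709120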
Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.033) 0:12:44.077 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.048) 0:12:44.125 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:39:52 -0400 (0:00:00.029) 0:12:44.155 ******** changed: [managed-node13] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:39:57 -0400 (0:00:04.792) 0:12:48.948 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:39:57 -0400 (0:00:00.046) 0:12:48.994 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925978.513391, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1753925978.1213894, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925978.1213894, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:39:58 -0400 (0:00:00.315) 0:12:49.310 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:39:58 -0400 (0:00:00.366) 0:12:49.677 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:39:58 -0400 (0:00:00.037) 0:12:49.715 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", 
"cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:39:58 -0400 (0:00:00.070) 0:12:49.785 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:39:58 -0400 (0:00:00.058) 0:12:49.844 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:39:58 -0400 (0:00:00.047) 0:12:49.891 
******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:39:58 -0400 (0:00:00.044) 0:12:49.936 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:39:59 -0400 (0:00:00.435) 0:12:50.371 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext2', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext2", "mount_info": { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:39:59 -0400 (0:00:00.373) 0:12:50.744 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext2', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:39:59 -0400 (0:00:00.053) 0:12:50.798 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:39:59 -0400 (0:00:00.432) 0:12:51.230 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": 
true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:40:00 -0400 (0:00:00.327) 0:12:51.557 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:40:00 -0400 (0:00:00.042) 0:12:51.600 ******** ok: [managed-node13] TASK [Verify role results - 11] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:295 Wednesday 30 July 2025 21:40:01 -0400 (0:00:00.723) 0:12:52.324 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:40:01 -0400 (0:00:00.069) 0:12:52.394 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:40:01 -0400 (0:00:00.057) 0:12:52.452 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:40:01 -0400 (0:00:00.038) 0:12:52.490 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext2", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "6ba5228b-0aca-4e5b-a6c6-16a4c16d43a1" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "gFFN8d-DCED-cbdA-sJD7-nCd7-B4XU-KKPAvJ" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:40:01 -0400 (0:00:00.349) 0:12:52.840 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002658", "end": "2025-07-30 21:40:01.821040", "rc": 0, "start": "2025-07-30 21:40:01.818382" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext2 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:40:01 -0400 (0:00:00.342) 0:12:53.183 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002449", "end": "2025-07-30 21:40:02.155971", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:40:02.153522" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:40:02 -0400 (0:00:00.351) 0:12:53.534 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:40:02 -0400 (0:00:00.112) 0:12:53.646 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:40:02 -0400 (0:00:00.128) 0:12:53.775 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.019368", "end": "2025-07-30 21:40:02.778068", "rc": 0, "start": "2025-07-30 21:40:02.758700" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:40:02 -0400 (0:00:00.381) 0:12:54.156 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:40:02 -0400 (0:00:00.072) 0:12:54.229 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.121) 0:12:54.350 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.064) 0:12:54.415 ******** ok: [managed-node13] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.367) 0:12:54.782 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.062) 0:12:54.845 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.061) 0:12:54.906 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.064) 0:12:54.971 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.060) 0:12:55.032 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.063) 0:12:55.095 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.056) 0:12:55.151 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:40:03 -0400 (0:00:00.075) 0:12:55.227 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.281) 0:12:55.508 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.070) 0:12:55.578 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.116) 0:12:55.695 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.053) 0:12:55.748 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.057) 0:12:55.806 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.054) 0:12:55.860 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.050) 0:12:55.911 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.054) 0:12:55.965 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.049) 0:12:56.014 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.049) 0:12:56.064 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.051) 0:12:56.115 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.050) 0:12:56.165 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.051) 0:12:56.216 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:40:04 -0400 (0:00:00.054) 0:12:56.270 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.112) 0:12:56.383 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.111) 0:12:56.495 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.055) 0:12:56.550 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.052) 0:12:56.602 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.049) 0:12:56.652 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.051) 0:12:56.703 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.049) 0:12:56.752 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.051) 0:12:56.804 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.054) 0:12:56.859 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.116) 0:12:56.975 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.108) 0:12:57.083 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.051) 0:12:57.135 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.047) 0:12:57.183 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.048) 0:12:57.231 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:40:05 -0400 (0:00:00.051) 0:12:57.283 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.124) 0:12:57.407 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.061) 0:12:57.469 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.060) 0:12:57.530 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.111) 0:12:57.641 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.129) 0:12:57.770 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.064) 0:12:57.834 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.052) 0:12:57.887 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.048) 0:12:57.936 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.049) 0:12:57.985 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.053) 0:12:58.039 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.057) 0:12:58.097 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:40:06 -0400 (0:00:00.129) 0:12:58.227 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.117) 0:12:58.344 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.051) 0:12:58.396 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.044) 0:12:58.441 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.044) 0:12:58.485 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
TASK [Check if VDO compression is off] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.043) 0:12:58.528 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO compression is on] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.033) 0:12:58.562 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.033) 0:12:58.596 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.048) 0:12:58.644 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.130) 0:12:58.775 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.056) 0:12:58.832 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.056) 0:12:58.889 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pool was created] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.053) 0:12:58.942 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.052) 0:12:58.995 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:40:07 -0400
(0:00:00.052) 0:12:59.047 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.049) 0:12:59.097 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.052) 0:12:59.149 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.048) 0:12:59.198 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:40:07 -0400 (0:00:00.085) 0:12:59.283 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.048) 0:12:59.331 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13
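The eight includes above are driven by the _storage_volume_tests list set in "Set storage volume test variables"; each entry expands to a test-verify-volume-<subset>.yml file. A sketch of that dispatch, assuming the loop variable named in the logged task title:

- name: Run test verify for storage_test_volume_subset (sketch)
  include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    # loop variable name matches the one shown in the task title above
    loop_var: storage_test_volume_subset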
TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.172) 0:12:59.503 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.042) 0:12:59.546 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.041) 0:12:59.588 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.050) 0:12:59.638 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.062) 0:12:59.701 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.058) 0:12:59.760 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.061) 0:12:59.818 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.055) 0:12:59.880 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.052) 0:12:59.935 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.052) 0:12:59.987 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.053) 0:13:00.041 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.052) 0:13:00.094 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext2 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.096) 0:13:00.190 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:40:08 -0400 (0:00:00.066) 0:13:00.256 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.065) 0:13:00.322 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.060) 0:13:00.383 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.074) 0:13:00.457 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false }
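The fstab verification above works by collecting the /etc/fstab lines that mention the device, the mount point, and the mount options into *_matches lists, then comparing each list length against the expected count ("1" here). One of those asserts, sketched with the fact names from the log:

- name: Verify that the device identifier appears in /etc/fstab (sketch)
  assert:
    that:
      # storage_test_fstab_id_matches held ["/dev/mapper/foo-test1 "] above
      - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int
    msg: Expected number of /etc/fstab entries for the device not found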
TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.056) 0:13:00.513 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.071) 0:13:00.585 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.072) 0:13:00.658 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925997.526473, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753925997.526473, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 244996, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753925997.526473, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.355) 0:13:01.014 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.053) 0:13:01.067 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.044) 0:13:01.112 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.094) 0:13:01.206 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:40:09 -0400 (0:00:00.046) 0:13:01.253 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.049) 0:13:01.303 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.059) 0:13:01.362 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.048) 0:13:01.411 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.514) 0:13:01.926 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.041) 0:13:01.967 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.034) 0:13:02.001 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.045) 0:13:02.046 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.041) 0:13:02.088 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.050) 0:13:02.138 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:40:10 -0400 (0:00:00.049) 0:13:02.187 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.051) 0:13:02.239 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:40:10 -0400 (0:00:00.055) 0:13:02.295 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.065) 0:13:02.360 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.062) 0:13:02.422 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.054) 0:13:02.477 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.052) 0:13:02.529 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.050) 0:13:02.580 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.054) 0:13:02.635 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.049) 0:13:02.685 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.059) 0:13:02.744 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.050) 0:13:02.795 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.050) 0:13:02.846 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.050) 0:13:02.896 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.052) 0:13:02.949 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.051) 0:13:03.000 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.051) 0:13:03.052 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.051) 0:13:03.103 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:40:11 -0400 (0:00:00.051) 0:13:03.155 ******** ok: 
[managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:40:12 -0400 (0:00:00.340) 0:13:03.496 ******** ok: [managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:40:12 -0400 (0:00:00.329) 0:13:03.825 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:40:12 -0400 (0:00:00.066) 0:13:03.892 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:40:12 -0400 (0:00:00.055) 0:13:03.947 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.355) 0:13:04.302 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.062) 0:13:04.365 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.061) 0:13:04.427 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.058) 0:13:04.486 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.058) 0:13:04.545 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
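The size verification normalizes both the actual LV size and the requested size to bytes before comparing, so "5g", "5GiB", and a raw byte count all reduce to the same integer (5368709120 here). The comparison that closes this block later in the log is essentially:

- name: Assert expected size is actual size (sketch)
  assert:
    that:
      # storage_test_actual_size comes from "Parse the actual size of the volume"
      - (storage_test_actual_size.bytes | int) == (storage_test_expected_size | int)
    msg: Volume size does not match the requested size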
TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.055) 0:13:04.600 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.054) 0:13:04.654 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.057) 0:13:04.712 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.054) 0:13:04.766 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.069) 0:13:04.835 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.060) 0:13:04.896 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.053) 0:13:04.949 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.055) 0:13:05.004 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.061) 0:13:05.066 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.057) 0:13:05.123 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025
21:40:13 -0400 (0:00:00.057) 0:13:05.180 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.051) 0:13:05.232 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:40:13 -0400 (0:00:00.057) 0:13:05.289 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.053) 0:13:05.343 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.060) 0:13:05.404 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.051) 0:13:05.455 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.045) 0:13:05.501 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.057) 0:13:05.559 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.018809", "end": "2025-07-30 21:40:14.525096", "rc": 0, "start": "2025-07-30 21:40:14.506287" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.320) 0:13:05.880 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false }
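The cache check shells out to lvs with --nameprefixes, which makes the output trivially parseable (LVM2_SEGTYPE=linear and friends). A sketch of that query and the fact it feeds; the register name lvs_info is illustrative, not the test's actual variable:

- name: Get information about the LV (sketch)
  command: >-
    lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
    -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
  register: lvs_info
  changed_when: false

- name: Set LV segment type (sketch)
  set_fact:
    # LVM2_SEGTYPE=linear in the STDOUT above parses to ["linear"]
    storage_test_lv_segtype: "{{ lvs_info.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"

A linear segment type confirms the LV is not cached; a cached LV would report segtype cache instead.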
TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.041) 0:13:05.921 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.043) 0:13:05.965 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.036) 0:13:06.001 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.036) 0:13:06.037 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.084) 0:13:06.121 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.037) 0:13:06.159 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.032) 0:13:06.192 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.031) 0:13:06.224 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change volume size to 9g] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:298 Wednesday 30 July 2025 21:40:14 -0400 (0:00:00.034) 0:13:06.258 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.066) 0:13:06.325 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for
managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.052) 0:13:06.377 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.042) 0:13:06.420 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.085) 0:13:06.505 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.036) 0:13:06.541 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.033) 0:13:06.575 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.033) 0:13:06.609 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.032) 0:13:06.641 ******** included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.079) 0:13:06.721 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.034) 0:13:06.755 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext2", "mount_point": "/opt/test1", "name": "test1", "size": "9g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.042) 0:13:06.797 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.044) 0:13:06.842 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.033) 0:13:06.876 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.036) 0:13:06.913 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.034) 0:13:06.948 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.034) 0:13:06.982 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.048) 0:13:07.030 ********
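The Show storage_pools output above is the entire input for this pass: the test re-invokes the role with the same layout but size: "9g". Reconstructed from the logged values, the invocation is roughly the following play (a sketch, not the test's literal source):

- hosts: managed-node13
  vars:
    storage_pools:
      - name: foo
        type: lvm
        disks:
          - sda
        volumes:
          - name: test1
            size: "9g"          # previously 5g; only the size changes
            fs_type: ext2
            mount_point: /opt/test1
  roles:
    - fedora.linux_system_roles.storage

Because the pool and volume already exist, blivet computes the difference against the current state and emits only the two resize actions shown in the next task.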
TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:40:15 -0400 (0:00:00.029) 0:13:07.060 ******** changed: [managed-node13] => { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:40:20 -0400 (0:00:05.115) 0:13:12.176 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:40:20 -0400 (0:00:00.037) 0:13:12.213 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925999.369481, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "c6abf0a388bb293ed38f7a11686eb491e2373da7", "ctime": 1753925999.366481, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false,
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925999.366481, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:40:21 -0400 (0:00:00.337) 0:13:12.551 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:40:21 -0400 (0:00:00.308) 0:13:12.860 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:40:21 -0400 (0:00:00.044) 0:13:12.904 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": 
null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:40:21 -0400 (0:00:00.056) 0:13:12.960 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:40:21 -0400 (0:00:00.043) 0:13:13.004 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:40:21 -0400 (0:00:00.040) 0:13:13.045 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:40:21 -0400 (0:00:00.042) 0:13:13.087 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:40:22 -0400 (0:00:00.461) 0:13:13.549 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext2', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': 
TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177
Wednesday 30 July 2025  21:40:22 -0400 (0:00:00.346)       0:13:13.895 ********
skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext2', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189
Wednesday 30 July 2025  21:40:22 -0400 (0:00:00.080)       0:13:13.976 ********
ok: [managed-node13] => { "changed": false, "name": null, "status": {} }
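The "name": null, "status": {} shape of this result is what the builtin systemd module returns when it is used purely for a daemon reload, with no unit name given. A minimal sketch of such a task (illustrative wording, not necessarily the role's exact source):

- name: Tell systemd to refresh its view of /etc/fstab (illustrative sketch)
  systemd:
    daemon_reload: yes

The reload matters because systemd generates .mount units from /etc/fstab at daemon-reload time; without it, systemd could keep acting on the old fstab contents.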
TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197
Wednesday 30 July 2025  21:40:23 -0400 (0:00:00.476)       0:13:14.452 ********
ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }
TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202
Wednesday 30 July 2025  21:40:23 -0400 (0:00:00.318)       0:13:14.771 ********
TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224
Wednesday 30 July 2025  21:40:23 -0400 (0:00:00.040)       0:13:14.811 ********
ok: [managed-node13]
TASK [Verify role results - 12] ************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:312
Wednesday 30 July 2025  21:40:24 -0400 (0:00:00.688)       0:13:15.499 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13
TASK [Print out pool information] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Wednesday 30 July 2025  21:40:24 -0400 (0:00:00.091)       0:13:15.590 ********
ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "9g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }
TASK [Print out volume information] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Wednesday 30 July 2025  21:40:24 -0400 (0:00:00.068)       0:13:15.659 ********
skipping: [managed-node13] => {}
TASK [Collect info about the volumes.]
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:40:24 -0400 (0:00:00.049) 0:13:15.709 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext2", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "9G", "type": "lvm", "uuid": "6ba5228b-0aca-4e5b-a6c6-16a4c16d43a1" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "gFFN8d-DCED-cbdA-sJD7-nCd7-B4XU-KKPAvJ" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:40:24 -0400 (0:00:00.332) 0:13:16.041 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002673", "end": "2025-07-30 21:40:25.028498", "rc": 0, "start": "2025-07-30 21:40:25.025825" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext2 defaults 0 0
TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Wednesday 30 July 2025  21:40:25 -0400 (0:00:00.367)       0:13:16.409 ********
ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003077", "end": "2025-07-30 21:40:25.374520", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:40:25.371443" }
TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Wednesday 30 July 2025  21:40:25 -0400 (0:00:00.341)       0:13:16.751 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13
TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Wednesday 30 July 2025  21:40:25 -0400 (0:00:00.109)       0:13:16.860 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false }
TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Wednesday 30 July 2025  21:40:25 -0400 (0:00:00.149)       0:13:17.010 ********
ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.020021", "end": "2025-07-30 21:40:26.028079", "rc": 0, "start": "2025-07-30 21:40:26.008058" }
STDOUT:
0
TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Wednesday 30 July 2025  21:40:26 -0400 (0:00:00.391)       0:13:17.401 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
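The two tasks above form a probe/assert pair: a command task captures the VG's shared attribute, and an assert task checks the output. A minimal sketch of such a pair, assuming a hypothetical register name vg_shared (illustrative, not the test's exact source):

- name: Get VG shared value status (illustrative sketch)
  command: vgs --noheadings --binary -o shared foo
  register: vg_shared
  changed_when: false

- name: Verify that VG shared value checks out (illustrative sketch)
  assert:
    that:
      - vg_shared.stdout | trim == '0'   # --binary prints 1/0; 0 means the VG is not shared

With --binary, vgs reports the shared attribute as 1 or 0 rather than descriptive text, which reduces the assertion to a simple string comparison against the logged STDOUT of 0.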
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:40:26 -0400 (0:00:00.380) 0:13:18.044 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:40:26 -0400 (0:00:00.065) 0:13:18.109 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:40:26 -0400 (0:00:00.063) 0:13:18.173 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:40:26 -0400 (0:00:00.062) 0:13:18.235 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:40:26 -0400 (0:00:00.058) 0:13:18.293 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:40:27 -0400 (0:00:00.062) 0:13:18.356 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:40:27 -0400 (0:00:00.058) 0:13:18.414 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:40:27 -0400 (0:00:00.076) 0:13:18.490 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.272)       0:13:18.763 ********
skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" }
TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.059)       0:13:18.823 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13
TASK [Get information about RAID] **********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.114)       0:13:18.937 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set active devices regex] ************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.054)       0:13:18.992 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.055)       0:13:19.047 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set md version regex] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.053)       0:13:19.101 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.054)       0:13:19.155 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.052)       0:13:19.208 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Wednesday 30 July 2025  21:40:27 -0400 (0:00:00.050)       0:13:19.259 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check RAID spare devices count] ******************************************
task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.053) 0:13:19.312 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.050) 0:13:19.363 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.052) 0:13:19.416 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.052) 0:13:19.468 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.056) 0:13:19.525 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.114) 0:13:19.639 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.112) 0:13:19.752 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.054) 0:13:19.806 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.049) 0:13:19.856 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.049) 0:13:19.906 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.050) 0:13:19.956 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.048) 0:13:20.004 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.051) 0:13:20.056 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.064) 0:13:20.120 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:40:28 -0400 (0:00:00.119) 0:13:20.239 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:40:29 -0400 (0:00:00.115) 0:13:20.354 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:40:29 -0400 (0:00:00.053) 0:13:20.408 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:40:29 -0400 (0:00:00.052) 0:13:20.461 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.052)       0:13:20.513 ********
ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false }
TASK [Check member encryption] *************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.053)       0:13:20.566 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13
TASK [Set test variables] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.127)       0:13:20.694 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false }
TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.061)       0:13:20.755 ********
skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" }
TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.058)       0:13:20.813 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13
TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.099)       0:13:20.913 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false }
TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.133)       0:13:21.047 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
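The crypttab check compares a filtered list of /etc/crypttab lines against an expected count, which is "0" here because the pool is unencrypted. A minimal sketch of how such an assertion can be phrased, reusing the variable names from the log (the real task's wording may differ):

- name: Check for /etc/crypttab entry (illustrative sketch)
  assert:
    that:
      - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int
    msg: "Unexpected number of crypttab entries for this pool member"

Since _storage_test_crypttab_entries was just set to an empty list and the expected count is "0", the assertion passes trivially on this run.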
TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.075)       0:13:21.122 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.050)       0:13:21.173 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.048)       0:13:21.222 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Wednesday 30 July 2025  21:40:29 -0400 (0:00:00.049)       0:13:21.271 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false }
TASK [Clear test variables] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Wednesday 30 July 2025  21:40:30 -0400 (0:00:00.052)       0:13:21.323 ********
ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false }
TASK [Check VDO] ***************************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Wednesday 30 July 2025  21:40:30 -0400 (0:00:00.059)       0:13:21.383 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13
TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Wednesday 30 July 2025  21:40:30 -0400 (0:00:00.130)       0:13:21.513 ********
included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13
TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Wednesday 30 July 2025  21:40:30 -0400 (0:00:00.120)       0:13:21.634 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Wednesday 30 July 2025  21:40:30 -0400 (0:00:00.058)       0:13:21.693 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Wednesday 30 July 2025  21:40:30 -0400 (0:00:00.054)       0:13:21.748 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Wednesday 30 July 2025  21:40:30 -0400 (0:00:00.051)       0:13:21.799 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.053) 0:13:21.853 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.047) 0:13:21.900 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.046) 0:13:21.947 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.046) 0:13:21.993 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.092) 0:13:22.086 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.038) 0:13:22.124 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.042) 0:13:22.167 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.051) 0:13:22.218 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:40:30 -0400 (0:00:00.052) 0:13:22.271 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:40:31 -0400 
(0:00:00.051) 0:13:22.323 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.052) 0:13:22.375 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.049) 0:13:22.424 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.053) 0:13:22.477 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.102) 0:13:22.580 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.060) 0:13:22.640 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.205) 
0:13:22.845 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.039) 0:13:22.885 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.043) 0:13:22.929 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.042) 0:13:22.971 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.041) 0:13:23.012 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.038) 0:13:23.051 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.037) 0:13:23.089 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.057) 0:13:23.146 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.053) 0:13:23.200 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:40:31 -0400 (0:00:00.049) 0:13:23.249 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:40:32 -0400 (0:00:00.049) 0:13:23.299 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:40:32 -0400 (0:00:00.052) 0:13:23.351 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext2 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:40:32 -0400 (0:00:00.088) 0:13:23.439 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:40:32 -0400 (0:00:00.062) 0:13:23.502 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:40:32 -0400 (0:00:00.063) 0:13:23.565 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:40:32 -0400 (0:00:00.050) 0:13:23.615 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:40:32 -0400 (0:00:00.061) 0:13:23.677 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.088)       0:13:23.439 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.062)       0:13:23.502 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.050)       0:13:23.565 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.061)       0:13:23.615 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
TASK [Clean up variables] ******************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.051)       0:13:23.677 ********
ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false }
TASK [Verify fs type] **********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.051)       0:13:23.729 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
TASK [Verify fs label] *********************************************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.071)       0:13:23.801 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
TASK [See whether the device node is present] **********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.070)       0:13:23.871 ********
ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753926020.7835734, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753926020.7835734, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 244996, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753926020.7835734, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } }
TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Wednesday 30 July 2025  21:40:32 -0400 (0:00:00.373)       0:13:24.244 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
TASK [Verify the presence/absence of the device node - 2] **********************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Wednesday 30 July 2025  21:40:33 -0400 (0:00:00.067)       0:13:24.312 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Wednesday 30 July 2025  21:40:33 -0400 (0:00:00.052)       0:13:24.364 ********
ok: [managed-node13] => { "changed": false }
MSG: All assertions passed
TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Wednesday 30 July 2025  21:40:33 -0400 (0:00:00.064)       0:13:24.429 ********
ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false }
TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Wednesday 30 July 2025  21:40:33 -0400 (0:00:00.124)       0:13:24.553 ********
skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Verify the volume's device type] *****************************************
task path:
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:40:33 -0400 (0:00:00.046) 0:13:24.600 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:40:33 -0400 (0:00:00.050) 0:13:24.650 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:40:33 -0400 (0:00:00.033) 0:13:24.684 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:40:33 -0400 (0:00:00.534) 0:13:25.219 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:40:33 -0400 (0:00:00.052) 0:13:25.271 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.042) 0:13:25.313 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.058) 0:13:25.371 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.043) 0:13:25.415 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.034) 0:13:25.449 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:40:34 -0400 (0:00:00.033) 0:13:25.483 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.037) 0:13:25.520 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.043) 0:13:25.564 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.069) 0:13:25.634 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.065) 0:13:25.699 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.053) 0:13:25.752 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.051) 0:13:25.804 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.052) 0:13:25.857 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.054) 0:13:25.911 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.056) 0:13:25.967 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.051) 0:13:26.018 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.050) 0:13:26.068 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.052) 0:13:26.121 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.049) 0:13:26.171 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.062) 0:13:26.234 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:40:34 -0400 (0:00:00.054) 0:13:26.288 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:40:35 -0400 (0:00:00.052) 0:13:26.341 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:40:35 -0400 (0:00:00.051) 0:13:26.393 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:40:35 -0400 (0:00:00.051) 0:13:26.444 ******** ok: 
[managed-node13] => { "bytes": 9663676416, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:40:35 -0400 (0:00:00.362) 0:13:26.806 ******** ok: [managed-node13] => { "bytes": 9663676416, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:40:35 -0400 (0:00:00.317) 0:13:27.124 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "9663676416" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:40:35 -0400 (0:00:00.068) 0:13:27.193 ******** ok: [managed-node13] => { "storage_test_expected_size": "9663676416" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:40:35 -0400 (0:00:00.063) 0:13:27.257 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.358) 0:13:27.615 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.059) 0:13:27.675 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.057) 0:13:27.732 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.058) 0:13:27.791 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.056) 0:13:27.847 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:40:36 -0400 
(0:00:00.057) 0:13:27.905 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.058) 0:13:27.963 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.053) 0:13:28.017 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.056) 0:13:28.074 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.057) 0:13:28.132 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.054) 0:13:28.186 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:40:36 -0400 (0:00:00.054) 0:13:28.240 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.058) 0:13:28.299 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.055) 0:13:28.354 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.053) 0:13:28.408 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:40:37 -0400 (0:00:00.059) 0:13:28.468 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.055) 0:13:28.523 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.055) 0:13:28.579 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.053) 0:13:28.633 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.055) 0:13:28.689 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 9663676416, "changed": false, "failed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.056) 0:13:28.745 ******** ok: [managed-node13] => { "storage_test_expected_size": "9663676416" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.058) 0:13:28.804 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.065) 0:13:28.869 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.018189", "end": "2025-07-30 21:40:37.862082", "rc": 0, "start": "2025-07-30 21:40:37.843893" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.345) 0:13:29.215 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false }
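The storage_test_lv_segtype fact above is derived from the KEY=VALUE pairs printed by lvs --nameprefixes. A stand-alone sketch of that style of parsing (the register name lvs_cache is hypothetical, not necessarily the test's actual variable):

    - name: Extract the segment type from lvs --nameprefixes output (sketch)
      set_fact:
        # regex_search with a capture-group argument returns a list,
        # e.g. ['linear'], matching the fact shown in the log above
        storage_test_lv_segtype: "{{ lvs_cache.stdout | regex_search('LVM2_SEGTYPE=(\\S+)', '\\1') }}"

TASK [Check segment type]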
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:40:37 -0400 (0:00:00.042) 0:13:29.257 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.047) 0:13:29.305 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.036) 0:13:29.341 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.036) 0:13:29.378 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.035) 0:13:29.414 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.035) 0:13:29.449 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.035) 0:13:29.485 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.030) 0:13:29.515 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change again volume size to before size 5g] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:315 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.034) 0:13:29.550 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.065) 0:13:29.615 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for 
managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.053) 0:13:29.668 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.040) 0:13:29.709 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.080) 0:13:29.790 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.033) 0:13:29.823 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.035) 0:13:29.859 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.085) 0:13:29.944 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.034) 0:13:29.979 ******** included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.076) 0:13:30.056 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.036) 0:13:30.093 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "type": "lvm", "volumes": [ { "fs_type": "ext2", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.041) 0:13:30.134 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.036) 0:13:30.171 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.033) 0:13:30.205 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.032) 0:13:30.238 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:40:38 -0400 (0:00:00.033) 0:13:30.271 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:40:39 -0400 (0:00:00.039) 0:13:30.311 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:40:39 -0400 (0:00:00.049) 0:13:30.361 ********
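For reference, the storage_pools value shown above corresponds to a role invocation along these lines, a minimal sketch reconstructed from the Show storage_pools output (the include_role form and the task name are assumptions, not the test's actual source):

    - name: Change volume size back to 5g (hypothetical invocation sketch)
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: [sda]
            type: lvm
            volumes:
              - name: test1
                size: 5g
                fs_type: ext2
                mount_point: /opt/test1

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: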
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:40:39 -0400 (0:00:00.030) 0:13:30.392 ******** changed: [managed-node13] => { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" }, { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] }
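Note the order of the two actions above: "resize format" is listed before "resize device", since a filesystem must be shrunk before the logical volume underneath it. A rough hand-rolled equivalent of those two steps (illustrative only; blivet performs them internally with additional safety checks, e.g. unmounting where required, and this sketch is not part of the test):

    - name: Shrink the ext2 filesystem to 5 GiB (illustrative equivalent)
      command: resize2fs /dev/mapper/foo-test1 5G
    - name: Shrink the logical volume to match (illustrative equivalent)
      command: lvreduce -f -L 5g foo/test1

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:40:44 -0400 (0:00:05.252) 0:13:35.644 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:40:44 -0400 (0:00:00.038) 0:13:35.683 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925999.369481, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "c6abf0a388bb293ed38f7a11686eb491e2373da7", "ctime": 1753925999.366481, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false,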
"isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925999.366481, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:40:44 -0400 (0:00:00.353) 0:13:36.036 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:40:45 -0400 (0:00:00.311) 0:13:36.347 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:40:45 -0400 (0:00:00.044) 0:13:36.392 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" }, { "action": "resize device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": 
null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:40:45 -0400 (0:00:00.050) 0:13:36.442 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:40:45 -0400 (0:00:00.044) 0:13:36.486 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:40:45 -0400 (0:00:00.038) 0:13:36.525 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:40:45 -0400 (0:00:00.039) 0:13:36.564 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:40:45 -0400 (0:00:00.478) 0:13:37.042 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext2', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': 
u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext2", "mount_info": { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:40:46 -0400 (0:00:00.348) 0:13:37.391 ******** skipping: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'ext2', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "ext2", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:40:46 -0400 (0:00:00.074) 0:13:37.465 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:40:46 -0400 (0:00:00.463) 0:13:37.929 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:40:46 -0400 (0:00:00.323) 0:13:38.252 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:40:46 -0400 (0:00:00.037) 0:13:38.289 ******** ok: [managed-node13] TASK [Verify role results - 13] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:329 Wednesday 30 July 2025 
21:40:47 -0400 (0:00:00.678) 0:13:38.967 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:40:47 -0400 (0:00:00.104) 0:13:39.071 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:40:47 -0400 (0:00:00.071) 0:13:39.143 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:40:47 -0400 (0:00:00.053) 0:13:39.196 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "ext2", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "6ba5228b-0aca-4e5b-a6c6-16a4c16d43a1" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "gFFN8d-DCED-cbdA-sJD7-nCd7-B4XU-KKPAvJ" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 30 July 2025 21:40:48 -0400 (0:00:00.339) 0:13:39.535 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002523", "end": "2025-07-30 21:40:48.513543", "rc": 0, "start": "2025-07-30 21:40:48.511020" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 ext2 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:40:48 -0400 (0:00:00.357) 0:13:39.892 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002610", "end": "2025-07-30 21:40:48.849075", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:40:48.846465" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:40:48 -0400 (0:00:00.330) 0:13:40.223 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:40:49 -0400 (0:00:00.111) 0:13:40.335 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:40:49 -0400 (0:00:00.052) 0:13:40.388 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.018236", "end": "2025-07-30 21:40:49.461566", "rc": 0, "start": "2025-07-30 21:40:49.443330" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:40:49 -0400 (0:00:00.445) 0:13:40.834 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:40:49 -0400 (0:00:00.069) 0:13:40.904 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:40:49 -0400 (0:00:00.120) 0:13:41.024 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:40:49 -0400 (0:00:00.067) 0:13:41.092 ******** ok: [managed-node13] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.363) 0:13:41.456 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.072) 0:13:41.528 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.062) 0:13:41.591 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.063) 0:13:41.655 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.058) 0:13:41.713 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.060) 0:13:41.774 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.055) 0:13:41.830 ******** ok: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.073) 0:13:41.903 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.267) 0:13:42.171 ******** skipping: [managed-node13] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:40:50 -0400 (0:00:00.059) 0:13:42.231 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.112) 0:13:42.343 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.053) 0:13:42.397 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.051) 0:13:42.449 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.052) 0:13:42.501 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.059) 0:13:42.561 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.053) 0:13:42.614 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.051) 0:13:42.665 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.051) 0:13:42.717 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.056) 0:13:42.773 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.052) 0:13:42.826 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.051) 0:13:42.877 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.056) 0:13:42.934 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.118) 0:13:43.052 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.111) 0:13:43.164 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.055) 0:13:43.219 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:40:51 -0400 (0:00:00.050) 0:13:43.270 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.050) 0:13:43.320 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.049) 0:13:43.369 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.047) 0:13:43.417 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.048) 0:13:43.466 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.051) 0:13:43.518 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.115) 0:13:43.634 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.109) 0:13:43.743 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.052) 0:13:43.796 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.048) 0:13:43.845 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.049) 0:13:43.894 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.050) 0:13:43.945 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.125) 0:13:44.071 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.060) 0:13:44.131 ******** skipping: [managed-node13] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.056) 0:13:44.188 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node13 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Wednesday 30 July 2025 21:40:52 -0400 (0:00:00.099) 0:13:44.287 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.062) 0:13:44.350 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.130) 0:13:44.480 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.053) 0:13:44.534 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.050) 0:13:44.585 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.053) 0:13:44.638 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.061) 0:13:44.700 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.052) 0:13:44.752 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.131) 0:13:44.884 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.121) 0:13:45.005 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.057) 0:13:45.062 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.052) 0:13:45.115 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.052) 0:13:45.168 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check 
if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.051) 0:13:45.220 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:40:53 -0400 (0:00:00.051) 0:13:45.272 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.046) 0:13:45.318 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.049) 0:13:45.368 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.100) 0:13:45.469 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.034) 0:13:45.504 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.033) 0:13:45.537 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.041) 0:13:45.579 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.053) 0:13:45.633 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:40:54 -0400 
(0:00:00.063) 0:13:45.696 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.052) 0:13:45.748 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.057) 0:13:45.805 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.052) 0:13:45.858 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.111) 0:13:45.969 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.059) 0:13:46.029 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:40:54 -0400 (0:00:00.241) 
0:13:46.270 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.043) 0:13:46.313 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.043) 0:13:46.357 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.043) 0:13:46.400 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.042) 0:13:46.443 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.039) 0:13:46.483 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.040) 0:13:46.523 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.039) 0:13:46.563 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.042) 0:13:46.605 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.044) 0:13:46.649 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.055) 0:13:46.705 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.066) 0:13:46.771 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 ext2 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.095) 0:13:46.867 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.063) 0:13:46.930 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.063) 0:13:46.994 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.050) 0:13:47.045 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.062) 0:13:47.107 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 
21:40:55 -0400 (0:00:00.052) 0:13:47.160 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:40:55 -0400 (0:00:00.069) 0:13:47.230 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.075) 0:13:47.305 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753926044.2406747, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1753926044.2406747, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 244996, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1753926044.2406747, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.373) 0:13:47.678 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.068) 0:13:47.746 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.049) 0:13:47.796 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.063) 0:13:47.859 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.136) 0:13:47.995 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.044) 0:13:48.039 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.049) 0:13:48.089 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:40:56 -0400 (0:00:00.040) 0:13:48.129 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.524) 0:13:48.654 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.053) 0:13:48.707 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.047) 0:13:48.754 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.066) 0:13:48.821 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.043) 0:13:48.864 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.038) 0:13:48.903 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 
2025 21:40:57 -0400 (0:00:00.034) 0:13:48.937 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.033) 0:13:48.970 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.036) 0:13:49.007 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.060) 0:13:49.067 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.058) 0:13:49.127 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.051) 0:13:49.178 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.050) 0:13:49.229 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:40:57 -0400 (0:00:00.050) 0:13:49.279 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.053) 0:13:49.332 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.058) 0:13:49.391 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.053) 0:13:49.445 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.054) 0:13:49.499 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.054) 0:13:49.554 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.052) 0:13:49.606 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.053) 0:13:49.660 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.058) 0:13:49.718 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.054) 0:13:49.773 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.062) 0:13:49.836 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.054) 0:13:49.890 ******** ok: 
[managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:40:58 -0400 (0:00:00.371) 0:13:50.262 ******** ok: [managed-node13] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:40:59 -0400 (0:00:00.326) 0:13:50.588 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:40:59 -0400 (0:00:00.062) 0:13:50.651 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:40:59 -0400 (0:00:00.057) 0:13:50.709 ******** ok: [managed-node13] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:40:59 -0400 (0:00:00.372) 0:13:51.081 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:40:59 -0400 (0:00:00.061) 0:13:51.143 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:40:59 -0400 (0:00:00.061) 0:13:51.204 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:40:59 -0400 (0:00:00.064) 0:13:51.269 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.062) 0:13:51.331 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:41:00 -0400 
(0:00:00.057) 0:13:51.388 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.061) 0:13:51.450 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.060) 0:13:51.510 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.060) 0:13:51.571 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.062) 0:13:51.633 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.057) 0:13:51.690 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.054) 0:13:51.745 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.057) 0:13:51.803 ******** skipping: [managed-node13] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.061) 0:13:51.865 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.059) 0:13:51.925 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 
21:41:00 -0400 (0:00:00.061) 0:13:51.986 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.058) 0:13:52.045 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.057) 0:13:52.103 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.057) 0:13:52.160 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.056) 0:13:52.217 ******** ok: [managed-node13] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:41:00 -0400 (0:00:00.059) 0:13:52.276 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.061) 0:13:52.337 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.145) 0:13:52.483 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.018009", "end": "2025-07-30 21:41:01.465042", "rc": 0, "start": "2025-07-30 21:41:01.447033" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.335) 0:13:52.818 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.043) 0:13:52.862 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.048) 0:13:52.911 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.038) 0:13:52.949 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.036) 0:13:52.986 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.038) 0:13:53.025 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.037) 0:13:53.062 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.034) 0:13:53.097 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.032) 0:13:53.130 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up - 3] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:333 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.033) 0:13:53.163 ******** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.074) 0:13:53.238 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for 
managed-node13 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Wednesday 30 July 2025 21:41:01 -0400 (0:00:00.053) 0:13:53.291 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.042) 0:13:53.334 ******** skipping: [managed-node13] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node13] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node13] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node13] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.084) 0:13:53.418 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.034) 0:13:53.452 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.032) 0:13:53.485 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.036) 0:13:53.521 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.034) 0:13:53.555 ******** included: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node13 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.077) 0:13:53.633 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.036) 0:13:53.669 ******** ok: [managed-node13] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "state": "absent", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.043) 0:13:53.712 ******** ok: [managed-node13] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.037) 0:13:53.750 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.033) 0:13:53.783 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.033) 0:13:53.817 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.032) 0:13:53.850 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.036) 0:13:53.887 ******** ok: [managed-node13] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.054) 0:13:53.941 ******** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Wednesday 30 July 2025 21:41:02 -0400 (0:00:00.031) 0:13:53.972 ******** changed: [managed-node13] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext2", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Wednesday 30 July 2025 21:41:07 -0400 (0:00:04.637) 0:13:58.610 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Wednesday 30 July 2025 21:41:07 -0400 (0:00:00.038) 0:13:58.648 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753925999.369481, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "c6abf0a388bb293ed38f7a11686eb491e2373da7", "ctime": 1753925999.366481, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264043, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, 
"isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1753925999.366481, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1280, "uid": 0, "version": "18446744072828510301", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Wednesday 30 July 2025 21:41:07 -0400 (0:00:00.341) 0:13:58.990 ******** ok: [managed-node13] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Wednesday 30 July 2025 21:41:07 -0400 (0:00:00.303) 0:13:59.294 ******** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Wednesday 30 July 2025 21:41:08 -0400 (0:00:00.039) 0:13:59.333 ******** ok: [managed-node13] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "ext2" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext2", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] 
} } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Wednesday 30 July 2025 21:41:08 -0400 (0:00:00.060) 0:13:59.393 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Wednesday 30 July 2025 21:41:08 -0400 (0:00:00.042) 0:13:59.436 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Wednesday 30 July 2025 21:41:08 -0400 (0:00:00.038) 0:13:59.474 ******** changed: [managed-node13] => (item={u'src': u'/dev/mapper/foo-test1', u'state': u'absent', u'fstype': u'ext2', u'path': u'/opt/test1'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext2", "mount_info": { "fstype": "ext2", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Wednesday 30 July 2025 21:41:08 -0400 (0:00:00.340) 0:13:59.815 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: 
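The "Remove obsolete mounts" change above is the standard mount-module pattern: each entry of blivet_output.mounts with state absent is unmounted and dropped from /etc/fstab, after which systemd re-reads the file. A stand-alone sketch follows, assuming the same mount list shape as in the log; the task wording approximates the role's, it is not copied from main-blivet.yml.

---
# Sketch of the obsolete-mount removal pattern; the loop data mirrors the
# mount_info item shown in the log above.
- hosts: managed-node13
  become: true
  tasks:
    - name: Remove obsolete mounts
      mount:
        src: "{{ mount_info.src }}"
        path: "{{ mount_info.path }}"
        fstype: "{{ mount_info.fstype }}"
        state: absent
      loop:
        - src: /dev/mapper/foo-test1
          path: /opt/test1
          fstype: ext2
      loop_control:
        loop_var: mount_info

    - name: Tell systemd to refresh its view of /etc/fstab
      systemd:
        daemon_reload: true
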
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Wednesday 30 July 2025 21:41:08 -0400 (0:00:00.448) 0:14:00.263 ******** TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Wednesday 30 July 2025 21:41:09 -0400 (0:00:00.051) 0:14:00.315 ******** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Wednesday 30 July 2025 21:41:09 -0400 (0:00:00.051) 0:14:00.366 ******** ok: [managed-node13] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Wednesday 30 July 2025 21:41:09 -0400 (0:00:00.480) 0:14:00.847 ******** ok: [managed-node13] => { "changed": false, "stat": { "atime": 1753923917.3395176, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1753923906.9244735, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1753923906.9234734, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072828511893", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Wednesday 30 July 2025 21:41:09 -0400 (0:00:00.335) 0:14:01.182 ******** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Wednesday 30 July 2025 21:41:09 -0400 (0:00:00.039) 0:14:01.222 ******** ok: [managed-node13] TASK [Verify role results - 14] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:346 Wednesday 30 July 2025 21:41:10 -0400 (0:00:00.827) 0:14:02.049 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node13 TASK [Print out pool information] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Wednesday 30 July 2025 21:41:10 -0400 (0:00:00.077) 0:14:02.127 ******** ok: [managed-node13] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, 
"encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext2", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Wednesday 30 July 2025 21:41:10 -0400 (0:00:00.070) 0:14:02.197 ******** skipping: [managed-node13] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Wednesday 30 July 2025 21:41:10 -0400 (0:00:00.052) 0:14:02.250 ******** ok: [managed-node13] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Wednesday 
30 July 2025 21:41:12 -0400 (0:00:01.346) 0:14:03.597 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002539", "end": "2025-07-30 21:41:12.567082", "rc": 0, "start": "2025-07-30 21:41:12.564543" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Wednesday 30 July 2025 21:41:12 -0400 (0:00:00.343) 0:14:03.940 ******** ok: [managed-node13] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002580", "end": "2025-07-30 21:41:12.910186", "failed_when_result": false, "rc": 0, "start": "2025-07-30 21:41:12.907606" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Wednesday 30 July 2025 21:41:12 -0400 (0:00:00.345) 0:14:04.286 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node13 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.109) 0:14:04.396 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.055) 0:14:04.452 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.050) 0:14:04.502 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** 
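The two cat reads above feed the later fstab/crypttab assertions: /etc/fstab must no longer reference the removed volume, and /etc/crypttab is read with failures tolerated because it may legitimately be empty (its failed_when_result is false, so any rc is recorded without failing the play). A condensed sketch of that pattern, with the assertion wording assumed:

---
# Sketch of the read-and-assert pattern used by the verification include.
- hosts: managed-node13
  tasks:
    - name: Read the /etc/fstab file for volume existence
      command: cat /etc/fstab
      register: storage_test_fstab
      changed_when: false

    - name: Read the /etc/crypttab file
      command: cat /etc/crypttab
      register: storage_test_crypttab
      failed_when: false
      changed_when: false

    - name: Assert the removed volume left no fstab entry behind
      assert:
        that:
          - "'/dev/mapper/foo-test1' not in storage_test_fstab.stdout"
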
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.049) 0:14:04.551 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.111) 0:14:04.663 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.068) 0:14:04.732 ******** TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.046) 0:14:04.778 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.058) 0:14:04.837 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.061) 0:14:04.899 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.063) 0:14:04.962 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.058) 0:14:05.020 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.071) 0:14:05.092 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.052) 0:14:05.145 ******** TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Wednesday 30 July 2025 21:41:13 -0400 (0:00:00.044) 0:14:05.189 ******** ok: [managed-node13] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.10.109 closed. MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.265) 0:14:05.454 ******** TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.046) 0:14:05.501 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node13 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.112) 0:14:05.613 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.052) 0:14:05.666 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.048) 0:14:05.714 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.049) 0:14:05.764 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.054) 0:14:05.818 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.052) 0:14:05.871 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] 
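The rc=1 with failed_when_result false in "Check that blivet supports PV grow to fill" above is a capability probe: the return code is data, not an error. A hedged sketch of such a probe follows; the module path and attribute name are assumptions inferred from the task name, not copied from the test files.

---
# Capability-probe sketch: rc 0 if the installed blivet exposes
# grow_to_fill, rc 1 otherwise; failed_when: false keeps either
# outcome non-fatal, exactly as seen in the log.
- name: Check that blivet supports PV grow to fill (probe sketch)
  command: >-
    python -c "import blivet.formats.lvmpv as m;
    exit(0 if hasattr(m.LVMPhysicalVolume, 'grow_to_fill') else 1)"
  register: storage_test_grow_support
  failed_when: false
  changed_when: false
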
***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.051) 0:14:05.922 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.052) 0:14:05.975 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.051) 0:14:06.027 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.062) 0:14:06.090 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.055) 0:14:06.145 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Wednesday 30 July 2025 21:41:14 -0400 (0:00:00.051) 0:14:06.196 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node13 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.113) 0:14:06.309 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node13 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.115) 0:14:06.425 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.106) 0:14:06.532 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] 
****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.043) 0:14:06.576 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.038) 0:14:06.615 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.033) 0:14:06.649 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.032) 0:14:06.682 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.034) 0:14:06.716 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.043) 0:14:06.760 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node13 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.112) 0:14:06.872 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node13 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.106) 0:14:06.979 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.056) 0:14:07.035 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.054) 0:14:07.090 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.061) 0:14:07.151 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Wednesday 30 July 2025 21:41:15 -0400 (0:00:00.053) 0:14:07.204 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node13 TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.110) 0:14:07.315 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.050) 0:14:07.365 ******** TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.038) 0:14:07.404 ******** TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.033) 0:14:07.438 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.032) 0:14:07.470 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node13 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.076) 0:14:07.547 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node13 TASK [Get information about VDO deduplication] ********************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.070) 0:14:07.617 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.032) 0:14:07.649 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.032) 0:14:07.681 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.037) 0:14:07.718 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off - 2] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.044) 0:14:07.763 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on - 2] ************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.049) 0:14:07.813 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.048) 0:14:07.861 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.049) 0:14:07.911 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node13 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.136) 0:14:08.047 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Wednesday 30 July 2025 21:41:16 -0400 
(0:00:00.052) 0:14:08.100 ******** skipping: [managed-node13] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.055) 0:14:08.156 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.049) 0:14:08.206 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Wednesday 30 July 2025 21:41:16 -0400 (0:00:00.049) 0:14:08.255 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.051) 0:14:08.307 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.051) 0:14:08.358 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.052) 0:14:08.410 ******** ok: [managed-node13] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.051) 0:14:08.461 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node13 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.099) 0:14:08.561 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Wednesday 30 July 2025 
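The _storage_volume_tests fact set above drives a per-subset dispatch: each name selects one test-verify-volume-<subset>.yml file, which matches the eight includes that follow in this log. A minimal sketch of that loop (the loop construction itself is assumed; the file-naming scheme is visible in the log):

---
# Dispatch sketch: one include per verification subset.
- name: Run test verify for storage_test_volume_subset
  include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset
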
21:41:17 -0400 (0:00:00.064) 0:14:08.625 ******** included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node13 included: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node13 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.257) 0:14:08.883 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.059) 0:14:08.943 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.062) 0:14:09.005 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.048) 0:14:09.054 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.044) 0:14:09.099 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.047) 0:14:09.147 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] 
************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.044) 0:14:09.191 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.046) 0:14:09.237 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Wednesday 30 July 2025 21:41:17 -0400 (0:00:00.052) 0:14:09.289 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.053) 0:14:09.343 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.053) 0:14:09.396 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.053) 0:14:09.450 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.165) 0:14:09.615 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.051) 0:14:09.666 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.059) 0:14:09.726 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.052) 0:14:09.778 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.061) 0:14:09.840 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.051) 0:14:09.891 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.057) 0:14:09.948 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Wednesday 30 July 2025 21:41:18 -0400 (0:00:00.053) 0:14:10.002 ******** ok: [managed-node13] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.348) 0:14:10.351 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.049) 0:14:10.401 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.063) 0:14:10.464 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] 
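The device check above stats the expected node and, because the volume was removed, asserts its absence. A condensed sketch of this stat-then-assert pattern, with task wording approximated from the log:

---
# stat-then-assert pattern for a removed volume's device node.
- name: See whether the device node is present
  stat:
    path: /dev/mapper/foo-test1
  register: storage_test_dev

- name: Verify the presence/absence of the device node
  assert:
    that:
      - not storage_test_dev.stat.exists
    msg: "Device node still present after removing the volume"
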
*************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.045) 0:14:10.509 ******** ok: [managed-node13] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.053) 0:14:10.563 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.050) 0:14:10.613 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.042) 0:14:10.655 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.048) 0:14:10.704 ******** ok: [managed-node13] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Wednesday 30 July 2025 21:41:19 -0400 (0:00:00.556) 0:14:11.261 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.053) 0:14:11.314 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.043) 0:14:11.358 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.036) 0:14:11.395 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.039) 0:14:11.434 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.031) 0:14:11.465 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.027) 0:14:11.493 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.027) 0:14:11.521 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.029) 0:14:11.550 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.041) 0:14:11.592 ******** ok: [managed-node13] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.039) 0:14:11.631 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:11.664 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.031) 0:14:11.696 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.031) 0:14:11.728 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.035) 0:14:11.763 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:11.795 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:11.828 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.031) 0:14:11.860 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:11.893 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:11.925 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.034) 0:14:11.960 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:11.993 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:12.026 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:12.058 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:12.090 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.032) 0:14:12.123 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.035) 0:14:12.158 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.031) 0:14:12.189 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.035) 0:14:12.225 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Wednesday 30 July 2025 21:41:20 -0400 (0:00:00.047) 0:14:12.273 ******** skipping: [managed-node13] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.049) 0:14:12.322 ******** skipping: [managed-node13] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.045) 0:14:12.368 ******** skipping: [managed-node13] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.053) 0:14:12.422 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.047) 0:14:12.470 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.052) 0:14:12.523 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.053) 0:14:12.576 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.053) 0:14:12.629 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.054) 0:14:12.684 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.056) 0:14:12.741 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.052) 0:14:12.794 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.054) 0:14:12.848 ******** skipping: [managed-node13] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.053) 0:14:12.902 ******** skipping: [managed-node13] => {} TASK [Show test volume 
size] *************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.052) 0:14:12.954 ******** skipping: [managed-node13] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.055) 0:14:13.009 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.059) 0:14:13.069 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.130) 0:14:13.200 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Wednesday 30 July 2025 21:41:21 -0400 (0:00:00.057) 0:14:13.257 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.056) 0:14:13.314 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.057) 0:14:13.371 ******** ok: [managed-node13] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.058) 0:14:13.429 ******** ok: [managed-node13] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.055) 0:14:13.484 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Wednesday 30 July 2025 21:41:22 
-0400 (0:00:00.052) 0:14:13.537 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.049) 0:14:13.587 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.051) 0:14:13.639 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.054) 0:14:13.694 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.051) 0:14:13.746 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.050) 0:14:13.796 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.054) 0:14:13.850 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.049) 0:14:13.900 ******** ok: [managed-node13] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.050) 0:14:13.950 ******** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.045) 0:14:13.995 ******** ok: [managed-node13] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Gather package facts] **************************************************** 
task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:349 Wednesday 30 July 2025 21:41:22 -0400 (0:00:00.048) 0:14:14.044 ******** ok: [managed-node13] => { "ansible_facts": { "packages": { "NetworkManager": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager", "release": "2.el7_9", "source": "rpm", "version": "1.18.8" } ], "NetworkManager-libnm": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-libnm", "release": "2.el7_9", "source": "rpm", "version": "1.18.8" } ], "NetworkManager-team": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-team", "release": "2.el7_9", "source": "rpm", "version": "1.18.8" } ], "NetworkManager-tui": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-tui", "release": "2.el7_9", "source": "rpm", "version": "1.18.8" } ], "PyYAML": [ { "arch": "x86_64", "epoch": null, "name": "PyYAML", "release": "11.el7", "source": "rpm", "version": "3.10" } ], "acl": [ { "arch": "x86_64", "epoch": null, "name": "acl", "release": "15.el7", "source": "rpm", "version": "2.2.51" } ], "aic94xx-firmware": [ { "arch": "noarch", "epoch": null, "name": "aic94xx-firmware", "release": "6.el7", "source": "rpm", "version": "30" } ], "alsa-firmware": [ { "arch": "noarch", "epoch": null, "name": "alsa-firmware", "release": "2.el7", "source": "rpm", "version": "1.0.28" } ], "alsa-lib": [ { "arch": "x86_64", "epoch": null, "name": "alsa-lib", "release": "1.el7", "source": "rpm", "version": "1.1.8" } ], "alsa-tools-firmware": [ { "arch": "x86_64", "epoch": null, "name": "alsa-tools-firmware", "release": "1.el7", "source": "rpm", "version": "1.1.0" } ], "aspell": [ { "arch": "x86_64", "epoch": 12, "name": "aspell", "release": "9.el7", "source": "rpm", "version": "0.60.6.1" } ], "audit": [ { "arch": "x86_64", "epoch": null, "name": "audit", "release": "4.el7", "source": "rpm", "version": "2.8.5" } ], "audit-libs": [ { "arch": "x86_64", "epoch": null, "name": "audit-libs", "release": "4.el7", "source": "rpm", "version": "2.8.5" } ], "audit-libs-python": [ { "arch": "x86_64", "epoch": null, "name": "audit-libs-python", "release": "4.el7", "source": "rpm", "version": "2.8.5" } ], "authconfig": [ { "arch": "x86_64", "epoch": null, "name": "authconfig", "release": "30.el7", "source": "rpm", "version": "6.2.8" } ], "avahi-libs": [ { "arch": "x86_64", "epoch": null, "name": "avahi-libs", "release": "20.el7", "source": "rpm", "version": "0.6.31" } ], "basesystem": [ { "arch": "noarch", "epoch": null, "name": "basesystem", "release": "7.el7.centos", "source": "rpm", "version": "10.0" } ], "bash": [ { "arch": "x86_64", "epoch": null, "name": "bash", "release": "35.el7_9", "source": "rpm", "version": "4.2.46" } ], "bc": [ { "arch": "x86_64", "epoch": null, "name": "bc", "release": "13.el7", "source": "rpm", "version": "1.06.95" } ], "beakerlib": [ { "arch": "noarch", "epoch": null, "name": "beakerlib", "release": "1.el7bkr", "source": "rpm", "version": "1.29.3" } ], "beakerlib-redhat": [ { "arch": "noarch", "epoch": null, "name": "beakerlib-redhat", "release": "33.el7bkr", "source": "rpm", "version": "1" } ], "bind-export-libs": [ { "arch": "x86_64", "epoch": 32, "name": "bind-export-libs", "release": "26.P2.el7_9.16", "source": "rpm", "version": "9.11.4" } ], "binutils": [ { "arch": "x86_64", "epoch": null, "name": "binutils", "release": "44.base.el7_9.1", "source": "rpm", "version": "2.27" } ], "biosdevname": [ { "arch": "x86_64", "epoch": null, "name": "biosdevname", "release": "2.el7", "source": "rpm", "version": "0.7.3" 
} ], "blivet3-data": [ { "arch": "noarch", "epoch": 1, "name": "blivet3-data", "release": "3.el7", "source": "rpm", "version": "3.1.3" } ], "boost-date-time": [ { "arch": "x86_64", "epoch": null, "name": "boost-date-time", "release": "28.el7", "source": "rpm", "version": "1.53.0" } ], "boost-system": [ { "arch": "x86_64", "epoch": null, "name": "boost-system", "release": "28.el7", "source": "rpm", "version": "1.53.0" } ], "boost-thread": [ { "arch": "x86_64", "epoch": null, "name": "boost-thread", "release": "28.el7", "source": "rpm", "version": "1.53.0" } ], "btrfs-progs": [ { "arch": "x86_64", "epoch": null, "name": "btrfs-progs", "release": "1.el7", "source": "rpm", "version": "4.9.1" } ], "bzip2-libs": [ { "arch": "x86_64", "epoch": null, "name": "bzip2-libs", "release": "13.el7", "source": "rpm", "version": "1.0.6" } ], "ca-certificates": [ { "arch": "noarch", "epoch": null, "name": "ca-certificates", "release": "72.el7_9", "source": "rpm", "version": "2023.2.60_v7.0.306" } ], "centos-logos": [ { "arch": "noarch", "epoch": null, "name": "centos-logos", "release": "3.el7.centos", "source": "rpm", "version": "70.0.6" } ], "centos-release": [ { "arch": "x86_64", "epoch": null, "name": "centos-release", "release": "9.2009.2.el7.centos", "source": "rpm", "version": "7" } ], "checkpolicy": [ { "arch": "x86_64", "epoch": null, "name": "checkpolicy", "release": "8.el7", "source": "rpm", "version": "2.5" } ], "chkconfig": [ { "arch": "x86_64", "epoch": null, "name": "chkconfig", "release": "1.el7", "source": "rpm", "version": "1.7.6" } ], "chrony": [ { "arch": "x86_64", "epoch": null, "name": "chrony", "release": "1.el7", "source": "rpm", "version": "3.4" } ], "cloud-init": [ { "arch": "x86_64", "epoch": null, "name": "cloud-init", "release": "24.el7", "source": "rpm", "version": "0.7.9" } ], "cloud-utils-growpart": [ { "arch": "noarch", "epoch": null, "name": "cloud-utils-growpart", "release": "5.el7", "source": "rpm", "version": "0.29" } ], "coreutils": [ { "arch": "x86_64", "epoch": null, "name": "coreutils", "release": "24.el7_9.2", "source": "rpm", "version": "8.22" } ], "cpio": [ { "arch": "x86_64", "epoch": null, "name": "cpio", "release": "28.el7", "source": "rpm", "version": "2.11" } ], "cpp": [ { "arch": "x86_64", "epoch": null, "name": "cpp", "release": "44.el7", "source": "rpm", "version": "4.8.5" } ], "cracklib": [ { "arch": "x86_64", "epoch": null, "name": "cracklib", "release": "11.el7", "source": "rpm", "version": "2.9.0" } ], "cracklib-dicts": [ { "arch": "x86_64", "epoch": null, "name": "cracklib-dicts", "release": "11.el7", "source": "rpm", "version": "2.9.0" } ], "cronie": [ { "arch": "x86_64", "epoch": null, "name": "cronie", "release": "25.el7_9", "source": "rpm", "version": "1.4.11" } ], "cronie-anacron": [ { "arch": "x86_64", "epoch": null, "name": "cronie-anacron", "release": "25.el7_9", "source": "rpm", "version": "1.4.11" } ], "crontabs": [ { "arch": "noarch", "epoch": null, "name": "crontabs", "release": "6.20121102git.el7", "source": "rpm", "version": "1.11" } ], "cryptsetup": [ { "arch": "x86_64", "epoch": null, "name": "cryptsetup", "release": "6.el7", "source": "rpm", "version": "2.0.3" } ], "cryptsetup-libs": [ { "arch": "x86_64", "epoch": null, "name": "cryptsetup-libs", "release": "6.el7", "source": "rpm", "version": "2.0.3" } ], "curl": [ { "arch": "x86_64", "epoch": null, "name": "curl", "release": "59.el7_9.2", "source": "rpm", "version": "7.29.0" } ], "cyrus-sasl-lib": [ { "arch": "x86_64", "epoch": null, "name": "cyrus-sasl-lib", "release": "24.el7_9", 
"source": "rpm", "version": "2.1.26" } ], "dbus": [ { "arch": "x86_64", "epoch": 1, "name": "dbus", "release": "15.el7", "source": "rpm", "version": "1.10.24" } ], "dbus-glib": [ { "arch": "x86_64", "epoch": null, "name": "dbus-glib", "release": "7.el7", "source": "rpm", "version": "0.100" } ], "dbus-libs": [ { "arch": "x86_64", "epoch": 1, "name": "dbus-libs", "release": "15.el7", "source": "rpm", "version": "1.10.24" } ], "dbus-python": [ { "arch": "x86_64", "epoch": null, "name": "dbus-python", "release": "9.el7", "source": "rpm", "version": "1.1.1" } ], "device-mapper": [ { "arch": "x86_64", "epoch": 7, "name": "device-mapper", "release": "6.el7_9.5", "source": "rpm", "version": "1.02.170" } ], "device-mapper-event": [ { "arch": "x86_64", "epoch": 7, "name": "device-mapper-event", "release": "6.el7_9.5", "source": "rpm", "version": "1.02.170" } ], "device-mapper-event-libs": [ { "arch": "x86_64", "epoch": 7, "name": "device-mapper-event-libs", "release": "6.el7_9.5", "source": "rpm", "version": "1.02.170" } ], "device-mapper-libs": [ { "arch": "x86_64", "epoch": 7, "name": "device-mapper-libs", "release": "6.el7_9.5", "source": "rpm", "version": "1.02.170" } ], "device-mapper-persistent-data": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-persistent-data", "release": "3.el7_9.2", "source": "rpm", "version": "0.8.5" } ], "dhclient": [ { "arch": "x86_64", "epoch": 12, "name": "dhclient", "release": "83.el7.centos.2", "source": "rpm", "version": "4.2.5" } ], "dhcp-common": [ { "arch": "x86_64", "epoch": 12, "name": "dhcp-common", "release": "83.el7.centos.2", "source": "rpm", "version": "4.2.5" } ], "dhcp-libs": [ { "arch": "x86_64", "epoch": 12, "name": "dhcp-libs", "release": "83.el7.centos.2", "source": "rpm", "version": "4.2.5" } ], "diffutils": [ { "arch": "x86_64", "epoch": null, "name": "diffutils", "release": "6.el7_9", "source": "rpm", "version": "3.3" } ], "dmidecode": [ { "arch": "x86_64", "epoch": 1, "name": "dmidecode", "release": "5.el7_9.1", "source": "rpm", "version": "3.2" } ], "dmraid": [ { "arch": "x86_64", "epoch": null, "name": "dmraid", "release": "28.el7", "source": "rpm", "version": "1.0.0.rc16" } ], "dmraid-events": [ { "arch": "x86_64", "epoch": null, "name": "dmraid-events", "release": "28.el7", "source": "rpm", "version": "1.0.0.rc16" } ], "dracut": [ { "arch": "x86_64", "epoch": null, "name": "dracut", "release": "572.el7", "source": "rpm", "version": "033" } ], "dracut-config-rescue": [ { "arch": "x86_64", "epoch": null, "name": "dracut-config-rescue", "release": "572.el7", "source": "rpm", "version": "033" } ], "dracut-network": [ { "arch": "x86_64", "epoch": null, "name": "dracut-network", "release": "572.el7", "source": "rpm", "version": "033" } ], "dyninst": [ { "arch": "x86_64", "epoch": null, "name": "dyninst", "release": "3.el7", "source": "rpm", "version": "9.3.1" } ], "e2fsprogs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs", "release": "19.el7", "source": "rpm", "version": "1.42.9" } ], "e2fsprogs-libs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs-libs", "release": "19.el7", "source": "rpm", "version": "1.42.9" } ], "ebtables": [ { "arch": "x86_64", "epoch": null, "name": "ebtables", "release": "16.el7", "source": "rpm", "version": "2.0.10" } ], "efivar-libs": [ { "arch": "x86_64", "epoch": null, "name": "efivar-libs", "release": "12.el7", "source": "rpm", "version": "36" } ], "elfutils-default-yama-scope": [ { "arch": "noarch", "epoch": null, "name": "elfutils-default-yama-scope", "release": "5.el7", "source": 
"rpm", "version": "0.176" } ], "elfutils-libelf": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libelf", "release": "5.el7", "source": "rpm", "version": "0.176" } ], "elfutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libs", "release": "5.el7", "source": "rpm", "version": "0.176" } ], "epel-release": [ { "arch": "noarch", "epoch": null, "name": "epel-release", "release": "14", "source": "rpm", "version": "7" } ], "ethtool": [ { "arch": "x86_64", "epoch": 2, "name": "ethtool", "release": "10.el7", "source": "rpm", "version": "4.8" } ], "expat": [ { "arch": "x86_64", "epoch": null, "name": "expat", "release": "15.el7_9", "source": "rpm", "version": "2.1.0" } ], "file": [ { "arch": "x86_64", "epoch": null, "name": "file", "release": "37.el7", "source": "rpm", "version": "5.11" } ], "file-libs": [ { "arch": "x86_64", "epoch": null, "name": "file-libs", "release": "37.el7", "source": "rpm", "version": "5.11" } ], "filesystem": [ { "arch": "x86_64", "epoch": null, "name": "filesystem", "release": "25.el7", "source": "rpm", "version": "3.2" } ], "findutils": [ { "arch": "x86_64", "epoch": 1, "name": "findutils", "release": "6.el7", "source": "rpm", "version": "4.5.11" } ], "fipscheck": [ { "arch": "x86_64", "epoch": null, "name": "fipscheck", "release": "6.el7", "source": "rpm", "version": "1.4.1" } ], "fipscheck-lib": [ { "arch": "x86_64", "epoch": null, "name": "fipscheck-lib", "release": "6.el7", "source": "rpm", "version": "1.4.1" } ], "firewalld": [ { "arch": "noarch", "epoch": null, "name": "firewalld", "release": "13.el7_9", "source": "rpm", "version": "0.6.3" } ], "firewalld-filesystem": [ { "arch": "noarch", "epoch": null, "name": "firewalld-filesystem", "release": "13.el7_9", "source": "rpm", "version": "0.6.3" } ], "freetype": [ { "arch": "x86_64", "epoch": null, "name": "freetype", "release": "14.el7_9.1", "source": "rpm", "version": "2.8" } ], "fxload": [ { "arch": "x86_64", "epoch": null, "name": "fxload", "release": "16.el7", "source": "rpm", "version": "2002_04_11" } ], "gawk": [ { "arch": "x86_64", "epoch": null, "name": "gawk", "release": "4.el7_3.1", "source": "rpm", "version": "4.0.2" } ], "gcc": [ { "arch": "x86_64", "epoch": null, "name": "gcc", "release": "44.el7", "source": "rpm", "version": "4.8.5" } ], "gdbm": [ { "arch": "x86_64", "epoch": null, "name": "gdbm", "release": "8.el7", "source": "rpm", "version": "1.10" } ], "gettext": [ { "arch": "x86_64", "epoch": null, "name": "gettext", "release": "3.el7", "source": "rpm", "version": "0.19.8.1" } ], "gettext-libs": [ { "arch": "x86_64", "epoch": null, "name": "gettext-libs", "release": "3.el7", "source": "rpm", "version": "0.19.8.1" } ], "git": [ { "arch": "x86_64", "epoch": null, "name": "git", "release": "25.el7_9", "source": "rpm", "version": "1.8.3.1" } ], "glib2": [ { "arch": "x86_64", "epoch": null, "name": "glib2", "release": "9.el7_9", "source": "rpm", "version": "2.56.1" } ], "glibc": [ { "arch": "x86_64", "epoch": null, "name": "glibc", "release": "326.el7_9.3", "source": "rpm", "version": "2.17" } ], "glibc-common": [ { "arch": "x86_64", "epoch": null, "name": "glibc-common", "release": "326.el7_9.3", "source": "rpm", "version": "2.17" } ], "glibc-devel": [ { "arch": "x86_64", "epoch": null, "name": "glibc-devel", "release": "326.el7_9.3", "source": "rpm", "version": "2.17" } ], "glibc-headers": [ { "arch": "x86_64", "epoch": null, "name": "glibc-headers", "release": "326.el7_9.3", "source": "rpm", "version": "2.17" } ], "gmp": [ { "arch": "x86_64", "epoch": 1, "name": "gmp", 
"release": "15.el7", "source": "rpm", "version": "6.0.0" } ], "gnupg2": [ { "arch": "x86_64", "epoch": null, "name": "gnupg2", "release": "5.el7_5", "source": "rpm", "version": "2.0.22" } ], "gobject-introspection": [ { "arch": "x86_64", "epoch": null, "name": "gobject-introspection", "release": "1.el7", "source": "rpm", "version": "1.56.1" } ], "gpg-pubkey": [ { "arch": null, "epoch": null, "name": "gpg-pubkey", "release": "52ae6884", "source": "rpm", "version": "352c64e5" }, { "arch": null, "epoch": null, "name": "gpg-pubkey", "release": "53a7ff4b", "source": "rpm", "version": "f4a80eb5" } ], "gpgme": [ { "arch": "x86_64", "epoch": null, "name": "gpgme", "release": "5.el7", "source": "rpm", "version": "1.3.2" } ], "gpm-libs": [ { "arch": "x86_64", "epoch": null, "name": "gpm-libs", "release": "6.el7", "source": "rpm", "version": "1.20.7" } ], "grep": [ { "arch": "x86_64", "epoch": null, "name": "grep", "release": "3.el7", "source": "rpm", "version": "2.20" } ], "groff-base": [ { "arch": "x86_64", "epoch": null, "name": "groff-base", "release": "8.el7", "source": "rpm", "version": "1.22.2" } ], "grub2": [ { "arch": "x86_64", "epoch": 1, "name": "grub2", "release": "0.87.0.2.el7.centos.14", "source": "rpm", "version": "2.02" } ], "grub2-common": [ { "arch": "noarch", "epoch": 1, "name": "grub2-common", "release": "0.87.0.2.el7.centos.14", "source": "rpm", "version": "2.02" } ], "grub2-pc": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-pc", "release": "0.87.0.2.el7.centos.14", "source": "rpm", "version": "2.02" } ], "grub2-pc-modules": [ { "arch": "noarch", "epoch": 1, "name": "grub2-pc-modules", "release": "0.87.0.2.el7.centos.14", "source": "rpm", "version": "2.02" } ], "grub2-tools": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools", "release": "0.87.0.2.el7.centos.14", "source": "rpm", "version": "2.02" } ], "grub2-tools-extra": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools-extra", "release": "0.87.0.2.el7.centos.14", "source": "rpm", "version": "2.02" } ], "grub2-tools-minimal": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools-minimal", "release": "0.87.0.2.el7.centos.14", "source": "rpm", "version": "2.02" } ], "grubby": [ { "arch": "x86_64", "epoch": null, "name": "grubby", "release": "26.el7", "source": "rpm", "version": "8.28" } ], "gssproxy": [ { "arch": "x86_64", "epoch": null, "name": "gssproxy", "release": "30.el7_9", "source": "rpm", "version": "0.7.0" } ], "gzip": [ { "arch": "x86_64", "epoch": null, "name": "gzip", "release": "11.el7_9", "source": "rpm", "version": "1.5" } ], "hardlink": [ { "arch": "x86_64", "epoch": 1, "name": "hardlink", "release": "19.el7", "source": "rpm", "version": "1.0" } ], "hostname": [ { "arch": "x86_64", "epoch": null, "name": "hostname", "release": "3.el7_7.1", "source": "rpm", "version": "3.13" } ], "hwdata": [ { "arch": "x86_64", "epoch": null, "name": "hwdata", "release": "9.7.el7", "source": "rpm", "version": "0.252" } ], "info": [ { "arch": "x86_64", "epoch": null, "name": "info", "release": "5.el7", "source": "rpm", "version": "5.1" } ], "initscripts": [ { "arch": "x86_64", "epoch": null, "name": "initscripts", "release": "1.el7_9.1", "source": "rpm", "version": "9.49.53" } ], "iproute": [ { "arch": "x86_64", "epoch": null, "name": "iproute", "release": "30.el7", "source": "rpm", "version": "4.11.0" } ], "iprutils": [ { "arch": "x86_64", "epoch": null, "name": "iprutils", "release": "3.el7_7", "source": "rpm", "version": "2.4.17.1" } ], "ipset": [ { "arch": "x86_64", "epoch": null, "name": "ipset", "release": 
"1.el7", "source": "rpm", "version": "7.1" } ], "ipset-libs": [ { "arch": "x86_64", "epoch": null, "name": "ipset-libs", "release": "1.el7", "source": "rpm", "version": "7.1" } ], "iptables": [ { "arch": "x86_64", "epoch": null, "name": "iptables", "release": "35.el7", "source": "rpm", "version": "1.4.21" } ], "iputils": [ { "arch": "x86_64", "epoch": null, "name": "iputils", "release": "10.el7", "source": "rpm", "version": "20160308" } ], "irqbalance": [ { "arch": "x86_64", "epoch": 3, "name": "irqbalance", "release": "12.el7", "source": "rpm", "version": "1.0.7" } ], "ivtv-firmware": [ { "arch": "noarch", "epoch": 2, "name": "ivtv-firmware", "release": "26.el7", "source": "rpm", "version": "20080701" } ], "iwl100-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl100-firmware", "release": "82.el7_9", "source": "rpm", "version": "39.31.5.1" } ], "iwl1000-firmware": [ { "arch": "noarch", "epoch": 1, "name": "iwl1000-firmware", "release": "82.el7_9", "source": "rpm", "version": "39.31.5.1" } ], "iwl105-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl105-firmware", "release": "82.el7_9", "source": "rpm", "version": "18.168.6.1" } ], "iwl135-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl135-firmware", "release": "82.el7_9", "source": "rpm", "version": "18.168.6.1" } ], "iwl2000-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl2000-firmware", "release": "82.el7_9", "source": "rpm", "version": "18.168.6.1" } ], "iwl2030-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl2030-firmware", "release": "82.el7_9", "source": "rpm", "version": "18.168.6.1" } ], "iwl3160-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl3160-firmware", "release": "82.el7_9", "source": "rpm", "version": "25.30.13.0" } ], "iwl3945-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl3945-firmware", "release": "82.el7_9", "source": "rpm", "version": "15.32.2.9" } ], "iwl4965-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl4965-firmware", "release": "82.el7_9", "source": "rpm", "version": "228.61.2.24" } ], "iwl5000-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl5000-firmware", "release": "82.el7_9", "source": "rpm", "version": "8.83.5.1_1" } ], "iwl5150-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl5150-firmware", "release": "82.el7_9", "source": "rpm", "version": "8.24.2.2" } ], "iwl6000-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl6000-firmware", "release": "82.el7_9", "source": "rpm", "version": "9.221.4.1" } ], "iwl6000g2a-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl6000g2a-firmware", "release": "82.el7_9", "source": "rpm", "version": "18.168.6.1" } ], "iwl6000g2b-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl6000g2b-firmware", "release": "82.el7_9", "source": "rpm", "version": "18.168.6.1" } ], "iwl6050-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl6050-firmware", "release": "82.el7_9", "source": "rpm", "version": "41.28.5.1" } ], "iwl7260-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwl7260-firmware", "release": "82.el7_9", "source": "rpm", "version": "25.30.13.0" } ], "jansson": [ { "arch": "x86_64", "epoch": null, "name": "jansson", "release": "1.el7", "source": "rpm", "version": "2.10" } ], "json-c": [ { "arch": "x86_64", "epoch": null, "name": "json-c", "release": "4.el7_0", "source": "rpm", "version": "0.11" } ], "kbd": [ { "arch": "x86_64", "epoch": null, "name": "kbd", "release": "16.el7_9", "source": "rpm", "version": "1.15.5" } 
], "kbd-legacy": [ { "arch": "noarch", "epoch": null, "name": "kbd-legacy", "release": "16.el7_9", "source": "rpm", "version": "1.15.5" } ], "kbd-misc": [ { "arch": "noarch", "epoch": null, "name": "kbd-misc", "release": "16.el7_9", "source": "rpm", "version": "1.15.5" } ], "kernel": [ { "arch": "x86_64", "epoch": null, "name": "kernel", "release": "1160.119.1.el7", "source": "rpm", "version": "3.10.0" }, { "arch": "x86_64", "epoch": null, "name": "kernel", "release": "1160.el7", "source": "rpm", "version": "3.10.0" } ], "kernel-debug-devel": [ { "arch": "x86_64", "epoch": null, "name": "kernel-debug-devel", "release": "1160.119.1.el7", "source": "rpm", "version": "3.10.0" } ], "kernel-headers": [ { "arch": "x86_64", "epoch": null, "name": "kernel-headers", "release": "1160.119.1.el7", "source": "rpm", "version": "3.10.0" } ], "kernel-tools": [ { "arch": "x86_64", "epoch": null, "name": "kernel-tools", "release": "1160.119.1.el7", "source": "rpm", "version": "3.10.0" } ], "kernel-tools-libs": [ { "arch": "x86_64", "epoch": null, "name": "kernel-tools-libs", "release": "1160.119.1.el7", "source": "rpm", "version": "3.10.0" } ], "kexec-tools": [ { "arch": "x86_64", "epoch": null, "name": "kexec-tools", "release": "51.el7_9.3", "source": "rpm", "version": "2.0.15" } ], "keyutils": [ { "arch": "x86_64", "epoch": null, "name": "keyutils", "release": "3.el7", "source": "rpm", "version": "1.5.8" } ], "keyutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "keyutils-libs", "release": "3.el7", "source": "rpm", "version": "1.5.8" } ], "kmod": [ { "arch": "x86_64", "epoch": null, "name": "kmod", "release": "28.el7", "source": "rpm", "version": "20" } ], "kmod-libs": [ { "arch": "x86_64", "epoch": null, "name": "kmod-libs", "release": "28.el7", "source": "rpm", "version": "20" } ], "kpartx": [ { "arch": "x86_64", "epoch": null, "name": "kpartx", "release": "136.el7_9", "source": "rpm", "version": "0.4.9" } ], "krb5-libs": [ { "arch": "x86_64", "epoch": null, "name": "krb5-libs", "release": "55.el7_9", "source": "rpm", "version": "1.15.1" } ], "less": [ { "arch": "x86_64", "epoch": null, "name": "less", "release": "10.el7_9", "source": "rpm", "version": "458" } ], "libacl": [ { "arch": "x86_64", "epoch": null, "name": "libacl", "release": "15.el7", "source": "rpm", "version": "2.2.51" } ], "libaio": [ { "arch": "x86_64", "epoch": null, "name": "libaio", "release": "13.el7", "source": "rpm", "version": "0.3.109" } ], "libassuan": [ { "arch": "x86_64", "epoch": null, "name": "libassuan", "release": "3.el7", "source": "rpm", "version": "2.1.0" } ], "libattr": [ { "arch": "x86_64", "epoch": null, "name": "libattr", "release": "13.el7", "source": "rpm", "version": "2.4.46" } ], "libbasicobjects": [ { "arch": "x86_64", "epoch": null, "name": "libbasicobjects", "release": "32.el7", "source": "rpm", "version": "0.1.1" } ], "libblkid": [ { "arch": "x86_64", "epoch": null, "name": "libblkid", "release": "65.el7_9.1", "source": "rpm", "version": "2.23.2" } ], "libblockdev": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev", "release": "5.el7", "source": "rpm", "version": "2.18" } ], "libblockdev-crypto": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-crypto", "release": "5.el7", "source": "rpm", "version": "2.18" } ], "libblockdev-dm": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-dm", "release": "5.el7", "source": "rpm", "version": "2.18" } ], "libblockdev-lvm": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-lvm", "release": "5.el7", "source": "rpm", "version": 
"2.18" } ], "libblockdev-mdraid": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-mdraid", "release": "5.el7", "source": "rpm", "version": "2.18" } ], "libblockdev-swap": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-swap", "release": "5.el7", "source": "rpm", "version": "2.18" } ], "libblockdev-utils": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-utils", "release": "5.el7", "source": "rpm", "version": "2.18" } ], "libbytesize": [ { "arch": "x86_64", "epoch": null, "name": "libbytesize", "release": "1.el7", "source": "rpm", "version": "1.2" } ], "libcap": [ { "arch": "x86_64", "epoch": null, "name": "libcap", "release": "11.el7", "source": "rpm", "version": "2.22" } ], "libcap-ng": [ { "arch": "x86_64", "epoch": null, "name": "libcap-ng", "release": "4.el7", "source": "rpm", "version": "0.7.5" } ], "libcgroup": [ { "arch": "x86_64", "epoch": null, "name": "libcgroup", "release": "21.el7", "source": "rpm", "version": "0.41" } ], "libcollection": [ { "arch": "x86_64", "epoch": null, "name": "libcollection", "release": "32.el7", "source": "rpm", "version": "0.7.0" } ], "libcom_err": [ { "arch": "x86_64", "epoch": null, "name": "libcom_err", "release": "19.el7", "source": "rpm", "version": "1.42.9" } ], "libcroco": [ { "arch": "x86_64", "epoch": null, "name": "libcroco", "release": "6.el7_9", "source": "rpm", "version": "0.6.12" } ], "libcurl": [ { "arch": "x86_64", "epoch": null, "name": "libcurl", "release": "59.el7_9.2", "source": "rpm", "version": "7.29.0" } ], "libdaemon": [ { "arch": "x86_64", "epoch": null, "name": "libdaemon", "release": "7.el7", "source": "rpm", "version": "0.14" } ], "libdb": [ { "arch": "x86_64", "epoch": null, "name": "libdb", "release": "25.el7", "source": "rpm", "version": "5.3.21" } ], "libdb-utils": [ { "arch": "x86_64", "epoch": null, "name": "libdb-utils", "release": "25.el7", "source": "rpm", "version": "5.3.21" } ], "libdnf": [ { "arch": "x86_64", "epoch": null, "name": "libdnf", "release": "2.el7_9", "source": "rpm", "version": "0.22.5" } ], "libdrm": [ { "arch": "x86_64", "epoch": null, "name": "libdrm", "release": "2.el7", "source": "rpm", "version": "2.4.97" } ], "libdwarf": [ { "arch": "x86_64", "epoch": null, "name": "libdwarf", "release": "4.el7", "source": "rpm", "version": "20130207" } ], "libedit": [ { "arch": "x86_64", "epoch": null, "name": "libedit", "release": "12.20121213cvs.el7", "source": "rpm", "version": "3.0" } ], "libestr": [ { "arch": "x86_64", "epoch": null, "name": "libestr", "release": "2.el7", "source": "rpm", "version": "0.1.9" } ], "libevent": [ { "arch": "x86_64", "epoch": null, "name": "libevent", "release": "4.el7", "source": "rpm", "version": "2.0.21" } ], "libfastjson": [ { "arch": "x86_64", "epoch": null, "name": "libfastjson", "release": "3.el7", "source": "rpm", "version": "0.99.4" } ], "libffi": [ { "arch": "x86_64", "epoch": null, "name": "libffi", "release": "19.el7", "source": "rpm", "version": "3.0.13" } ], "libgcc": [ { "arch": "x86_64", "epoch": null, "name": "libgcc", "release": "44.el7", "source": "rpm", "version": "4.8.5" } ], "libgcrypt": [ { "arch": "x86_64", "epoch": null, "name": "libgcrypt", "release": "14.el7", "source": "rpm", "version": "1.5.3" } ], "libgomp": [ { "arch": "x86_64", "epoch": null, "name": "libgomp", "release": "44.el7", "source": "rpm", "version": "4.8.5" } ], "libgpg-error": [ { "arch": "x86_64", "epoch": null, "name": "libgpg-error", "release": "3.el7", "source": "rpm", "version": "1.12" } ], "libidn": [ { "arch": "x86_64", "epoch": null, "name": 
"libidn", "release": "4.el7", "source": "rpm", "version": "1.28" } ], "libini_config": [ { "arch": "x86_64", "epoch": null, "name": "libini_config", "release": "32.el7", "source": "rpm", "version": "1.3.1" } ], "libmnl": [ { "arch": "x86_64", "epoch": null, "name": "libmnl", "release": "7.el7", "source": "rpm", "version": "1.0.3" } ], "libmodulemd": [ { "arch": "x86_64", "epoch": null, "name": "libmodulemd", "release": "1.el7", "source": "rpm", "version": "1.6.3" } ], "libmount": [ { "arch": "x86_64", "epoch": null, "name": "libmount", "release": "65.el7_9.1", "source": "rpm", "version": "2.23.2" } ], "libmpc": [ { "arch": "x86_64", "epoch": null, "name": "libmpc", "release": "3.el7", "source": "rpm", "version": "1.0.1" } ], "libndp": [ { "arch": "x86_64", "epoch": null, "name": "libndp", "release": "9.el7", "source": "rpm", "version": "1.2" } ], "libnetfilter_conntrack": [ { "arch": "x86_64", "epoch": null, "name": "libnetfilter_conntrack", "release": "1.el7_3", "source": "rpm", "version": "1.0.6" } ], "libnfnetlink": [ { "arch": "x86_64", "epoch": null, "name": "libnfnetlink", "release": "4.el7", "source": "rpm", "version": "1.0.1" } ], "libnfsidmap": [ { "arch": "x86_64", "epoch": null, "name": "libnfsidmap", "release": "19.el7", "source": "rpm", "version": "0.25" } ], "libnl": [ { "arch": "x86_64", "epoch": null, "name": "libnl", "release": "3.el7", "source": "rpm", "version": "1.1.4" } ], "libnl3": [ { "arch": "x86_64", "epoch": null, "name": "libnl3", "release": "4.el7", "source": "rpm", "version": "3.2.28" } ], "libnl3-cli": [ { "arch": "x86_64", "epoch": null, "name": "libnl3-cli", "release": "4.el7", "source": "rpm", "version": "3.2.28" } ], "libpath_utils": [ { "arch": "x86_64", "epoch": null, "name": "libpath_utils", "release": "32.el7", "source": "rpm", "version": "0.2.1" } ], "libpciaccess": [ { "arch": "x86_64", "epoch": null, "name": "libpciaccess", "release": "1.el7", "source": "rpm", "version": "0.14" } ], "libpipeline": [ { "arch": "x86_64", "epoch": null, "name": "libpipeline", "release": "3.el7", "source": "rpm", "version": "1.2.3" } ], "libpng": [ { "arch": "x86_64", "epoch": 2, "name": "libpng", "release": "8.el7", "source": "rpm", "version": "1.5.13" } ], "libpwquality": [ { "arch": "x86_64", "epoch": null, "name": "libpwquality", "release": "5.el7", "source": "rpm", "version": "1.2.3" } ], "libref_array": [ { "arch": "x86_64", "epoch": null, "name": "libref_array", "release": "32.el7", "source": "rpm", "version": "0.1.5" } ], "librepo": [ { "arch": "x86_64", "epoch": null, "name": "librepo", "release": "8.el7_9", "source": "rpm", "version": "1.8.1" } ], "libreport-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "libreport-filesystem", "release": "53.el7.centos", "source": "rpm", "version": "2.1.11" } ], "libseccomp": [ { "arch": "x86_64", "epoch": null, "name": "libseccomp", "release": "4.el7", "source": "rpm", "version": "2.3.1" } ], "libselinux": [ { "arch": "x86_64", "epoch": null, "name": "libselinux", "release": "15.el7", "source": "rpm", "version": "2.5" } ], "libselinux-python": [ { "arch": "x86_64", "epoch": null, "name": "libselinux-python", "release": "15.el7", "source": "rpm", "version": "2.5" } ], "libselinux-utils": [ { "arch": "x86_64", "epoch": null, "name": "libselinux-utils", "release": "15.el7", "source": "rpm", "version": "2.5" } ], "libsemanage": [ { "arch": "x86_64", "epoch": null, "name": "libsemanage", "release": "14.el7", "source": "rpm", "version": "2.5" } ], "libsemanage-python": [ { "arch": "x86_64", "epoch": null, "name": 
"libsemanage-python", "release": "14.el7", "source": "rpm", "version": "2.5" } ], "libsepol": [ { "arch": "x86_64", "epoch": null, "name": "libsepol", "release": "10.el7", "source": "rpm", "version": "2.5" } ], "libsmartcols": [ { "arch": "x86_64", "epoch": null, "name": "libsmartcols", "release": "65.el7_9.1", "source": "rpm", "version": "2.23.2" } ], "libsolv": [ { "arch": "x86_64", "epoch": null, "name": "libsolv", "release": "4.el7", "source": "rpm", "version": "0.6.34" } ], "libss": [ { "arch": "x86_64", "epoch": null, "name": "libss", "release": "19.el7", "source": "rpm", "version": "1.42.9" } ], "libssh2": [ { "arch": "x86_64", "epoch": null, "name": "libssh2", "release": "4.el7_9.1", "source": "rpm", "version": "1.8.0" } ], "libstdc++": [ { "arch": "x86_64", "epoch": null, "name": "libstdc++", "release": "44.el7", "source": "rpm", "version": "4.8.5" } ], "libsysfs": [ { "arch": "x86_64", "epoch": null, "name": "libsysfs", "release": "16.el7", "source": "rpm", "version": "2.1.0" } ], "libtasn1": [ { "arch": "x86_64", "epoch": null, "name": "libtasn1", "release": "1.el7", "source": "rpm", "version": "4.10" } ], "libteam": [ { "arch": "x86_64", "epoch": null, "name": "libteam", "release": "3.el7", "source": "rpm", "version": "1.29" } ], "libtirpc": [ { "arch": "x86_64", "epoch": null, "name": "libtirpc", "release": "0.16.el7", "source": "rpm", "version": "0.2.4" } ], "libunistring": [ { "arch": "x86_64", "epoch": null, "name": "libunistring", "release": "9.el7", "source": "rpm", "version": "0.9.3" } ], "libuser": [ { "arch": "x86_64", "epoch": null, "name": "libuser", "release": "9.el7", "source": "rpm", "version": "0.60" } ], "libutempter": [ { "arch": "x86_64", "epoch": null, "name": "libutempter", "release": "4.el7", "source": "rpm", "version": "1.1.6" } ], "libuuid": [ { "arch": "x86_64", "epoch": null, "name": "libuuid", "release": "65.el7_9.1", "source": "rpm", "version": "2.23.2" } ], "libverto": [ { "arch": "x86_64", "epoch": null, "name": "libverto", "release": "4.el7", "source": "rpm", "version": "0.2.5" } ], "libverto-libevent": [ { "arch": "x86_64", "epoch": null, "name": "libverto-libevent", "release": "4.el7", "source": "rpm", "version": "0.2.5" } ], "libxml2": [ { "arch": "x86_64", "epoch": null, "name": "libxml2", "release": "6.el7_9.6", "source": "rpm", "version": "2.9.1" } ], "libxml2-python": [ { "arch": "x86_64", "epoch": null, "name": "libxml2-python", "release": "6.el7_9.6", "source": "rpm", "version": "2.9.1" } ], "libxslt": [ { "arch": "x86_64", "epoch": null, "name": "libxslt", "release": "6.el7", "source": "rpm", "version": "1.1.28" } ], "libyaml": [ { "arch": "x86_64", "epoch": null, "name": "libyaml", "release": "11.el7_0", "source": "rpm", "version": "0.1.4" } ], "linux-firmware": [ { "arch": "noarch", "epoch": null, "name": "linux-firmware", "release": "82.git78c0348.el7_9", "source": "rpm", "version": "20200421" } ], "logrotate": [ { "arch": "x86_64", "epoch": null, "name": "logrotate", "release": "19.el7", "source": "rpm", "version": "3.8.6" } ], "lshw": [ { "arch": "x86_64", "epoch": null, "name": "lshw", "release": "0.1.20180614git028f6b2.beaker.1.el7bkr.1", "source": "rpm", "version": "B.02.19" } ], "lsof": [ { "arch": "x86_64", "epoch": null, "name": "lsof", "release": "6.el7", "source": "rpm", "version": "4.87" } ], "lsscsi": [ { "arch": "x86_64", "epoch": null, "name": "lsscsi", "release": "6.el7", "source": "rpm", "version": "0.27" } ], "lua": [ { "arch": "x86_64", "epoch": null, "name": "lua", "release": "15.el7", "source": "rpm", "version": 
"5.1.4" } ], "lvm2": [ { "arch": "x86_64", "epoch": 7, "name": "lvm2", "release": "6.el7_9.5", "source": "rpm", "version": "2.02.187" } ], "lvm2-libs": [ { "arch": "x86_64", "epoch": 7, "name": "lvm2-libs", "release": "6.el7_9.5", "source": "rpm", "version": "2.02.187" } ], "lz4": [ { "arch": "x86_64", "epoch": null, "name": "lz4", "release": "1.el7", "source": "rpm", "version": "1.8.3" } ], "lzo": [ { "arch": "x86_64", "epoch": null, "name": "lzo", "release": "8.el7", "source": "rpm", "version": "2.06" } ], "mailcap": [ { "arch": "noarch", "epoch": null, "name": "mailcap", "release": "2.el7", "source": "rpm", "version": "2.1.41" } ], "make": [ { "arch": "x86_64", "epoch": 1, "name": "make", "release": "24.el7", "source": "rpm", "version": "3.82" } ], "man-db": [ { "arch": "x86_64", "epoch": null, "name": "man-db", "release": "11.el7", "source": "rpm", "version": "2.6.3" } ], "mariadb-libs": [ { "arch": "x86_64", "epoch": 1, "name": "mariadb-libs", "release": "1.el7", "source": "rpm", "version": "5.5.68" } ], "mdadm": [ { "arch": "x86_64", "epoch": null, "name": "mdadm", "release": "9.el7_9", "source": "rpm", "version": "4.1" } ], "microcode_ctl": [ { "arch": "x86_64", "epoch": 2, "name": "microcode_ctl", "release": "73.20.el7_9", "source": "rpm", "version": "2.1" } ], "mokutil": [ { "arch": "x86_64", "epoch": null, "name": "mokutil", "release": "8.el7", "source": "rpm", "version": "15" } ], "mozjs17": [ { "arch": "x86_64", "epoch": null, "name": "mozjs17", "release": "20.el7", "source": "rpm", "version": "17.0.0" } ], "mpfr": [ { "arch": "x86_64", "epoch": null, "name": "mpfr", "release": "4.el7", "source": "rpm", "version": "3.1.1" } ], "ncurses": [ { "arch": "x86_64", "epoch": null, "name": "ncurses", "release": "14.20130511.el7_4", "source": "rpm", "version": "5.9" } ], "ncurses-base": [ { "arch": "noarch", "epoch": null, "name": "ncurses-base", "release": "14.20130511.el7_4", "source": "rpm", "version": "5.9" } ], "ncurses-libs": [ { "arch": "x86_64", "epoch": null, "name": "ncurses-libs", "release": "14.20130511.el7_4", "source": "rpm", "version": "5.9" } ], "net-tools": [ { "arch": "x86_64", "epoch": null, "name": "net-tools", "release": "0.25.20131004git.el7", "source": "rpm", "version": "2.0" } ], "newt": [ { "arch": "x86_64", "epoch": null, "name": "newt", "release": "4.el7", "source": "rpm", "version": "0.52.15" } ], "newt-python": [ { "arch": "x86_64", "epoch": null, "name": "newt-python", "release": "4.el7", "source": "rpm", "version": "0.52.15" } ], "nfs-utils": [ { "arch": "x86_64", "epoch": 1, "name": "nfs-utils", "release": "0.68.el7.2", "source": "rpm", "version": "1.3.0" } ], "nspr": [ { "arch": "x86_64", "epoch": null, "name": "nspr", "release": "1.el7_9", "source": "rpm", "version": "4.35.0" } ], "nss": [ { "arch": "x86_64", "epoch": null, "name": "nss", "release": "2.el7_9", "source": "rpm", "version": "3.90.0" } ], "nss-pem": [ { "arch": "x86_64", "epoch": null, "name": "nss-pem", "release": "7.el7_9.1", "source": "rpm", "version": "1.0.3" } ], "nss-softokn": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn", "release": "6.el7_9", "source": "rpm", "version": "3.90.0" } ], "nss-softokn-freebl": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn-freebl", "release": "6.el7_9", "source": "rpm", "version": "3.90.0" } ], "nss-sysinit": [ { "arch": "x86_64", "epoch": null, "name": "nss-sysinit", "release": "2.el7_9", "source": "rpm", "version": "3.90.0" } ], "nss-tools": [ { "arch": "x86_64", "epoch": null, "name": "nss-tools", "release": "2.el7_9", 
"source": "rpm", "version": "3.90.0" } ], "nss-util": [ { "arch": "x86_64", "epoch": null, "name": "nss-util", "release": "1.el7_9", "source": "rpm", "version": "3.90.0" } ], "numactl-libs": [ { "arch": "x86_64", "epoch": null, "name": "numactl-libs", "release": "5.el7", "source": "rpm", "version": "2.0.12" } ], "openldap": [ { "arch": "x86_64", "epoch": null, "name": "openldap", "release": "25.el7_9", "source": "rpm", "version": "2.4.44" } ], "openssh": [ { "arch": "x86_64", "epoch": null, "name": "openssh", "release": "23.el7_9", "source": "rpm", "version": "7.4p1" } ], "openssh-clients": [ { "arch": "x86_64", "epoch": null, "name": "openssh-clients", "release": "23.el7_9", "source": "rpm", "version": "7.4p1" } ], "openssh-server": [ { "arch": "x86_64", "epoch": null, "name": "openssh-server", "release": "23.el7_9", "source": "rpm", "version": "7.4p1" } ], "openssl": [ { "arch": "x86_64", "epoch": 1, "name": "openssl", "release": "26.el7_9", "source": "rpm", "version": "1.0.2k" } ], "openssl-libs": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-libs", "release": "26.el7_9", "source": "rpm", "version": "1.0.2k" } ], "os-prober": [ { "arch": "x86_64", "epoch": null, "name": "os-prober", "release": "9.el7", "source": "rpm", "version": "1.58" } ], "p11-kit": [ { "arch": "x86_64", "epoch": null, "name": "p11-kit", "release": "3.el7", "source": "rpm", "version": "0.23.5" } ], "p11-kit-trust": [ { "arch": "x86_64", "epoch": null, "name": "p11-kit-trust", "release": "3.el7", "source": "rpm", "version": "0.23.5" } ], "pam": [ { "arch": "x86_64", "epoch": null, "name": "pam", "release": "23.el7", "source": "rpm", "version": "1.1.8" } ], "parted": [ { "arch": "x86_64", "epoch": null, "name": "parted", "release": "32.el7", "source": "rpm", "version": "3.1" } ], "passwd": [ { "arch": "x86_64", "epoch": null, "name": "passwd", "release": "6.el7", "source": "rpm", "version": "0.79" } ], "pciutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "pciutils-libs", "release": "3.el7", "source": "rpm", "version": "3.5.1" } ], "pcre": [ { "arch": "x86_64", "epoch": null, "name": "pcre", "release": "17.el7", "source": "rpm", "version": "8.32" } ], "perl": [ { "arch": "x86_64", "epoch": 4, "name": "perl", "release": "299.el7_9", "source": "rpm", "version": "5.16.3" } ], "perl-Business-ISBN": [ { "arch": "noarch", "epoch": null, "name": "perl-Business-ISBN", "release": "2.el7", "source": "rpm", "version": "2.06" } ], "perl-Business-ISBN-Data": [ { "arch": "noarch", "epoch": null, "name": "perl-Business-ISBN-Data", "release": "2.el7", "source": "rpm", "version": "20120719.001" } ], "perl-CGI": [ { "arch": "noarch", "epoch": null, "name": "perl-CGI", "release": "4.el7", "source": "rpm", "version": "3.63" } ], "perl-Carp": [ { "arch": "noarch", "epoch": null, "name": "perl-Carp", "release": "244.el7", "source": "rpm", "version": "1.26" } ], "perl-Compress-Raw-Bzip2": [ { "arch": "x86_64", "epoch": null, "name": "perl-Compress-Raw-Bzip2", "release": "3.el7", "source": "rpm", "version": "2.061" } ], "perl-Compress-Raw-Zlib": [ { "arch": "x86_64", "epoch": 1, "name": "perl-Compress-Raw-Zlib", "release": "4.el7", "source": "rpm", "version": "2.061" } ], "perl-Data-Dumper": [ { "arch": "x86_64", "epoch": null, "name": "perl-Data-Dumper", "release": "3.el7", "source": "rpm", "version": "2.145" } ], "perl-Date-Manip": [ { "arch": "noarch", "epoch": null, "name": "perl-Date-Manip", "release": "2.el7", "source": "rpm", "version": "6.41" } ], "perl-Digest": [ { "arch": "noarch", "epoch": null, "name": "perl-Digest", 
"release": "245.el7", "source": "rpm", "version": "1.17" } ], "perl-Digest-MD5": [ { "arch": "x86_64", "epoch": null, "name": "perl-Digest-MD5", "release": "3.el7", "source": "rpm", "version": "2.52" } ], "perl-Encode": [ { "arch": "x86_64", "epoch": null, "name": "perl-Encode", "release": "7.el7", "source": "rpm", "version": "2.51" } ], "perl-Encode-Locale": [ { "arch": "noarch", "epoch": null, "name": "perl-Encode-Locale", "release": "5.el7", "source": "rpm", "version": "1.03" } ], "perl-Error": [ { "arch": "noarch", "epoch": 1, "name": "perl-Error", "release": "2.el7", "source": "rpm", "version": "0.17020" } ], "perl-Exporter": [ { "arch": "noarch", "epoch": null, "name": "perl-Exporter", "release": "3.el7", "source": "rpm", "version": "5.68" } ], "perl-FCGI": [ { "arch": "x86_64", "epoch": 1, "name": "perl-FCGI", "release": "8.el7", "source": "rpm", "version": "0.74" } ], "perl-File-Listing": [ { "arch": "noarch", "epoch": null, "name": "perl-File-Listing", "release": "7.el7", "source": "rpm", "version": "6.04" } ], "perl-File-Path": [ { "arch": "noarch", "epoch": null, "name": "perl-File-Path", "release": "2.el7", "source": "rpm", "version": "2.09" } ], "perl-File-Temp": [ { "arch": "noarch", "epoch": null, "name": "perl-File-Temp", "release": "3.el7", "source": "rpm", "version": "0.23.01" } ], "perl-Filter": [ { "arch": "x86_64", "epoch": null, "name": "perl-Filter", "release": "3.el7", "source": "rpm", "version": "1.49" } ], "perl-Font-AFM": [ { "arch": "noarch", "epoch": null, "name": "perl-Font-AFM", "release": "13.el7", "source": "rpm", "version": "1.20" } ], "perl-Getopt-Long": [ { "arch": "noarch", "epoch": null, "name": "perl-Getopt-Long", "release": "3.el7", "source": "rpm", "version": "2.40" } ], "perl-Git": [ { "arch": "noarch", "epoch": null, "name": "perl-Git", "release": "25.el7_9", "source": "rpm", "version": "1.8.3.1" } ], "perl-HTML-Format": [ { "arch": "noarch", "epoch": null, "name": "perl-HTML-Format", "release": "7.el7", "source": "rpm", "version": "2.10" } ], "perl-HTML-Parser": [ { "arch": "x86_64", "epoch": null, "name": "perl-HTML-Parser", "release": "4.el7", "source": "rpm", "version": "3.71" } ], "perl-HTML-Tagset": [ { "arch": "noarch", "epoch": null, "name": "perl-HTML-Tagset", "release": "15.el7", "source": "rpm", "version": "3.20" } ], "perl-HTML-Tree": [ { "arch": "noarch", "epoch": 1, "name": "perl-HTML-Tree", "release": "2.el7", "source": "rpm", "version": "5.03" } ], "perl-HTTP-Cookies": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Cookies", "release": "5.el7", "source": "rpm", "version": "6.01" } ], "perl-HTTP-Daemon": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Daemon", "release": "8.el7", "source": "rpm", "version": "6.01" } ], "perl-HTTP-Date": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Date", "release": "8.el7", "source": "rpm", "version": "6.02" } ], "perl-HTTP-Message": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Message", "release": "6.el7", "source": "rpm", "version": "6.06" } ], "perl-HTTP-Negotiate": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Negotiate", "release": "5.el7", "source": "rpm", "version": "6.01" } ], "perl-HTTP-Tiny": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Tiny", "release": "3.el7", "source": "rpm", "version": "0.033" } ], "perl-IO-Compress": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Compress", "release": "2.el7", "source": "rpm", "version": "2.061" } ], "perl-IO-HTML": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-HTML", 
"release": "2.el7", "source": "rpm", "version": "1.00" } ], "perl-IO-Socket-IP": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-IP", "release": "5.el7", "source": "rpm", "version": "0.21" } ], "perl-IO-Socket-SSL": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-SSL", "release": "7.el7", "source": "rpm", "version": "1.94" } ], "perl-IO-Zlib": [ { "arch": "noarch", "epoch": 1, "name": "perl-IO-Zlib", "release": "299.el7_9", "source": "rpm", "version": "1.10" } ], "perl-IO-stringy": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-stringy", "release": "22.el7", "source": "rpm", "version": "2.110" } ], "perl-LWP-MediaTypes": [ { "arch": "noarch", "epoch": null, "name": "perl-LWP-MediaTypes", "release": "2.el7", "source": "rpm", "version": "6.02" } ], "perl-LWP-Protocol-https": [ { "arch": "noarch", "epoch": null, "name": "perl-LWP-Protocol-https", "release": "4.el7", "source": "rpm", "version": "6.04" } ], "perl-Module-Load": [ { "arch": "noarch", "epoch": 1, "name": "perl-Module-Load", "release": "3.el7", "source": "rpm", "version": "0.24" } ], "perl-Mozilla-CA": [ { "arch": "noarch", "epoch": null, "name": "perl-Mozilla-CA", "release": "5.el7", "source": "rpm", "version": "20130114" } ], "perl-Net-HTTP": [ { "arch": "noarch", "epoch": null, "name": "perl-Net-HTTP", "release": "2.el7", "source": "rpm", "version": "6.06" } ], "perl-Net-LibIDN": [ { "arch": "x86_64", "epoch": null, "name": "perl-Net-LibIDN", "release": "15.el7", "source": "rpm", "version": "0.12" } ], "perl-Net-SSLeay": [ { "arch": "x86_64", "epoch": null, "name": "perl-Net-SSLeay", "release": "6.el7", "source": "rpm", "version": "1.55" } ], "perl-PathTools": [ { "arch": "x86_64", "epoch": null, "name": "perl-PathTools", "release": "5.el7", "source": "rpm", "version": "3.40" } ], "perl-Pod-Escapes": [ { "arch": "noarch", "epoch": 1, "name": "perl-Pod-Escapes", "release": "299.el7_9", "source": "rpm", "version": "1.04" } ], "perl-Pod-Perldoc": [ { "arch": "noarch", "epoch": null, "name": "perl-Pod-Perldoc", "release": "4.el7", "source": "rpm", "version": "3.20" } ], "perl-Pod-Simple": [ { "arch": "noarch", "epoch": 1, "name": "perl-Pod-Simple", "release": "4.el7", "source": "rpm", "version": "3.28" } ], "perl-Pod-Usage": [ { "arch": "noarch", "epoch": null, "name": "perl-Pod-Usage", "release": "3.el7", "source": "rpm", "version": "1.63" } ], "perl-Scalar-List-Utils": [ { "arch": "x86_64", "epoch": null, "name": "perl-Scalar-List-Utils", "release": "248.el7", "source": "rpm", "version": "1.27" } ], "perl-Socket": [ { "arch": "x86_64", "epoch": null, "name": "perl-Socket", "release": "5.el7", "source": "rpm", "version": "2.010" } ], "perl-Storable": [ { "arch": "x86_64", "epoch": null, "name": "perl-Storable", "release": "3.el7", "source": "rpm", "version": "2.45" } ], "perl-TermReadKey": [ { "arch": "x86_64", "epoch": null, "name": "perl-TermReadKey", "release": "20.el7", "source": "rpm", "version": "2.30" } ], "perl-Text-ParseWords": [ { "arch": "noarch", "epoch": null, "name": "perl-Text-ParseWords", "release": "4.el7", "source": "rpm", "version": "3.29" } ], "perl-Time-HiRes": [ { "arch": "x86_64", "epoch": 4, "name": "perl-Time-HiRes", "release": "3.el7", "source": "rpm", "version": "1.9725" } ], "perl-Time-Local": [ { "arch": "noarch", "epoch": null, "name": "perl-Time-Local", "release": "2.el7", "source": "rpm", "version": "1.2300" } ], "perl-TimeDate": [ { "arch": "noarch", "epoch": 1, "name": "perl-TimeDate", "release": "2.el7", "source": "rpm", "version": "2.30" } ], "perl-URI": [ { 
"arch": "noarch", "epoch": null, "name": "perl-URI", "release": "9.el7", "source": "rpm", "version": "1.60" } ], "perl-WWW-RobotRules": [ { "arch": "noarch", "epoch": null, "name": "perl-WWW-RobotRules", "release": "5.el7", "source": "rpm", "version": "6.02" } ], "perl-XML-LibXML": [ { "arch": "x86_64", "epoch": 1, "name": "perl-XML-LibXML", "release": "5.el7", "source": "rpm", "version": "2.0018" } ], "perl-XML-NamespaceSupport": [ { "arch": "noarch", "epoch": null, "name": "perl-XML-NamespaceSupport", "release": "10.el7", "source": "rpm", "version": "1.11" } ], "perl-XML-Parser": [ { "arch": "x86_64", "epoch": null, "name": "perl-XML-Parser", "release": "10.el7", "source": "rpm", "version": "2.41" } ], "perl-XML-SAX": [ { "arch": "noarch", "epoch": null, "name": "perl-XML-SAX", "release": "9.el7", "source": "rpm", "version": "0.99" } ], "perl-XML-SAX-Base": [ { "arch": "noarch", "epoch": null, "name": "perl-XML-SAX-Base", "release": "7.el7", "source": "rpm", "version": "1.08" } ], "perl-XML-Twig": [ { "arch": "noarch", "epoch": null, "name": "perl-XML-Twig", "release": "2.el7", "source": "rpm", "version": "3.44" } ], "perl-constant": [ { "arch": "noarch", "epoch": null, "name": "perl-constant", "release": "2.el7", "source": "rpm", "version": "1.27" } ], "perl-libs": [ { "arch": "x86_64", "epoch": 4, "name": "perl-libs", "release": "299.el7_9", "source": "rpm", "version": "5.16.3" } ], "perl-libwww-perl": [ { "arch": "noarch", "epoch": null, "name": "perl-libwww-perl", "release": "2.el7", "source": "rpm", "version": "6.05" } ], "perl-macros": [ { "arch": "x86_64", "epoch": 4, "name": "perl-macros", "release": "299.el7_9", "source": "rpm", "version": "5.16.3" } ], "perl-parent": [ { "arch": "noarch", "epoch": 1, "name": "perl-parent", "release": "244.el7", "source": "rpm", "version": "0.225" } ], "perl-podlators": [ { "arch": "noarch", "epoch": null, "name": "perl-podlators", "release": "3.el7", "source": "rpm", "version": "2.5.1" } ], "perl-threads": [ { "arch": "x86_64", "epoch": null, "name": "perl-threads", "release": "4.el7", "source": "rpm", "version": "1.87" } ], "perl-threads-shared": [ { "arch": "x86_64", "epoch": null, "name": "perl-threads-shared", "release": "6.el7", "source": "rpm", "version": "1.43" } ], "pinentry": [ { "arch": "x86_64", "epoch": null, "name": "pinentry", "release": "17.el7", "source": "rpm", "version": "0.8.1" } ], "pkgconfig": [ { "arch": "x86_64", "epoch": 1, "name": "pkgconfig", "release": "4.el7", "source": "rpm", "version": "0.27.1" } ], "plymouth": [ { "arch": "x86_64", "epoch": null, "name": "plymouth", "release": "0.34.20140113.el7.centos", "source": "rpm", "version": "0.8.9" } ], "plymouth-core-libs": [ { "arch": "x86_64", "epoch": null, "name": "plymouth-core-libs", "release": "0.34.20140113.el7.centos", "source": "rpm", "version": "0.8.9" } ], "plymouth-scripts": [ { "arch": "x86_64", "epoch": null, "name": "plymouth-scripts", "release": "0.34.20140113.el7.centos", "source": "rpm", "version": "0.8.9" } ], "policycoreutils": [ { "arch": "x86_64", "epoch": null, "name": "policycoreutils", "release": "34.el7", "source": "rpm", "version": "2.5" } ], "policycoreutils-python": [ { "arch": "x86_64", "epoch": null, "name": "policycoreutils-python", "release": "34.el7", "source": "rpm", "version": "2.5" } ], "polkit": [ { "arch": "x86_64", "epoch": null, "name": "polkit", "release": "26.el7_9.1", "source": "rpm", "version": "0.112" } ], "polkit-pkla-compat": [ { "arch": "x86_64", "epoch": null, "name": "polkit-pkla-compat", "release": "4.el7", "source": 
"rpm", "version": "0.1" } ], "popt": [ { "arch": "x86_64", "epoch": null, "name": "popt", "release": "16.el7", "source": "rpm", "version": "1.13" } ], "postfix": [ { "arch": "x86_64", "epoch": 2, "name": "postfix", "release": "9.el7", "source": "rpm", "version": "2.10.1" } ], "procps-ng": [ { "arch": "x86_64", "epoch": null, "name": "procps-ng", "release": "28.el7", "source": "rpm", "version": "3.3.10" } ], "pth": [ { "arch": "x86_64", "epoch": null, "name": "pth", "release": "23.el7", "source": "rpm", "version": "2.0.7" } ], "pygpgme": [ { "arch": "x86_64", "epoch": null, "name": "pygpgme", "release": "9.el7", "source": "rpm", "version": "0.3" } ], "pyliblzma": [ { "arch": "x86_64", "epoch": null, "name": "pyliblzma", "release": "11.el7", "source": "rpm", "version": "0.5.3" } ], "pyparsing": [ { "arch": "noarch", "epoch": null, "name": "pyparsing", "release": "9.el7", "source": "rpm", "version": "1.5.6" } ], "pyparted": [ { "arch": "x86_64", "epoch": 1, "name": "pyparted", "release": "15.el7", "source": "rpm", "version": "3.9" } ], "pyserial": [ { "arch": "noarch", "epoch": null, "name": "pyserial", "release": "6.el7", "source": "rpm", "version": "2.6" } ], "python": [ { "arch": "x86_64", "epoch": null, "name": "python", "release": "94.el7_9", "source": "rpm", "version": "2.7.5" } ], "python-IPy": [ { "arch": "noarch", "epoch": null, "name": "python-IPy", "release": "6.el7", "source": "rpm", "version": "0.75" } ], "python-babel": [ { "arch": "noarch", "epoch": null, "name": "python-babel", "release": "8.el7", "source": "rpm", "version": "0.9.6" } ], "python-backports": [ { "arch": "x86_64", "epoch": null, "name": "python-backports", "release": "8.el7", "source": "rpm", "version": "1.0" } ], "python-backports-ssl_match_hostname": [ { "arch": "noarch", "epoch": null, "name": "python-backports-ssl_match_hostname", "release": "1.el7", "source": "rpm", "version": "3.5.0.1" } ], "python-chardet": [ { "arch": "noarch", "epoch": null, "name": "python-chardet", "release": "3.el7", "source": "rpm", "version": "2.2.1" } ], "python-configobj": [ { "arch": "noarch", "epoch": null, "name": "python-configobj", "release": "7.el7", "source": "rpm", "version": "4.7.2" } ], "python-configshell": [ { "arch": "noarch", "epoch": 1, "name": "python-configshell", "release": "1.el7", "source": "rpm", "version": "1.1.26" } ], "python-decorator": [ { "arch": "noarch", "epoch": null, "name": "python-decorator", "release": "3.el7", "source": "rpm", "version": "3.4.0" } ], "python-enum34": [ { "arch": "noarch", "epoch": null, "name": "python-enum34", "release": "1.el7", "source": "rpm", "version": "1.0.4" } ], "python-ethtool": [ { "arch": "x86_64", "epoch": null, "name": "python-ethtool", "release": "8.el7", "source": "rpm", "version": "0.8" } ], "python-firewall": [ { "arch": "noarch", "epoch": null, "name": "python-firewall", "release": "13.el7_9", "source": "rpm", "version": "0.6.3" } ], "python-gobject-base": [ { "arch": "x86_64", "epoch": null, "name": "python-gobject-base", "release": "1.el7_4.1", "source": "rpm", "version": "3.22.0" } ], "python-iniparse": [ { "arch": "noarch", "epoch": null, "name": "python-iniparse", "release": "9.el7", "source": "rpm", "version": "0.4" } ], "python-ipaddress": [ { "arch": "noarch", "epoch": null, "name": "python-ipaddress", "release": "2.el7", "source": "rpm", "version": "1.0.16" } ], "python-jinja2": [ { "arch": "noarch", "epoch": null, "name": "python-jinja2", "release": "4.el7", "source": "rpm", "version": "2.7.2" } ], "python-jsonpatch": [ { "arch": "noarch", "epoch": 
null, "name": "python-jsonpatch", "release": "4.el7", "source": "rpm", "version": "1.2" } ], "python-jsonpointer": [ { "arch": "noarch", "epoch": null, "name": "python-jsonpointer", "release": "2.el7", "source": "rpm", "version": "1.9" } ], "python-kitchen": [ { "arch": "noarch", "epoch": null, "name": "python-kitchen", "release": "5.el7", "source": "rpm", "version": "1.1.1" } ], "python-kmod": [ { "arch": "x86_64", "epoch": null, "name": "python-kmod", "release": "4.el7", "source": "rpm", "version": "0.9" } ], "python-libs": [ { "arch": "x86_64", "epoch": null, "name": "python-libs", "release": "94.el7_9", "source": "rpm", "version": "2.7.5" } ], "python-linux-procfs": [ { "arch": "noarch", "epoch": null, "name": "python-linux-procfs", "release": "4.el7", "source": "rpm", "version": "0.4.11" } ], "python-lxml": [ { "arch": "x86_64", "epoch": null, "name": "python-lxml", "release": "4.el7", "source": "rpm", "version": "3.2.1" } ], "python-markupsafe": [ { "arch": "x86_64", "epoch": null, "name": "python-markupsafe", "release": "10.el7", "source": "rpm", "version": "0.11" } ], "python-perf": [ { "arch": "x86_64", "epoch": null, "name": "python-perf", "release": "1160.119.1.el7", "source": "rpm", "version": "3.10.0" } ], "python-prettytable": [ { "arch": "noarch", "epoch": null, "name": "python-prettytable", "release": "3.el7", "source": "rpm", "version": "0.7.2" } ], "python-pycurl": [ { "arch": "x86_64", "epoch": null, "name": "python-pycurl", "release": "19.el7", "source": "rpm", "version": "7.19.0" } ], "python-pyudev": [ { "arch": "noarch", "epoch": null, "name": "python-pyudev", "release": "9.el7", "source": "rpm", "version": "0.15" } ], "python-requests": [ { "arch": "noarch", "epoch": null, "name": "python-requests", "release": "10.el7", "source": "rpm", "version": "2.6.0" } ], "python-rtslib": [ { "arch": "noarch", "epoch": null, "name": "python-rtslib", "release": "1.el7_9", "source": "rpm", "version": "2.1.74" } ], "python-schedutils": [ { "arch": "x86_64", "epoch": null, "name": "python-schedutils", "release": "6.el7", "source": "rpm", "version": "0.4" } ], "python-setuptools": [ { "arch": "noarch", "epoch": null, "name": "python-setuptools", "release": "7.el7", "source": "rpm", "version": "0.9.8" } ], "python-six": [ { "arch": "noarch", "epoch": null, "name": "python-six", "release": "2.el7", "source": "rpm", "version": "1.9.0" } ], "python-slip": [ { "arch": "noarch", "epoch": null, "name": "python-slip", "release": "4.el7", "source": "rpm", "version": "0.4.0" } ], "python-slip-dbus": [ { "arch": "noarch", "epoch": null, "name": "python-slip-dbus", "release": "4.el7", "source": "rpm", "version": "0.4.0" } ], "python-urlgrabber": [ { "arch": "noarch", "epoch": null, "name": "python-urlgrabber", "release": "10.el7", "source": "rpm", "version": "3.10" } ], "python-urllib3": [ { "arch": "noarch", "epoch": null, "name": "python-urllib3", "release": "7.el7", "source": "rpm", "version": "1.10.2" } ], "python-urwid": [ { "arch": "x86_64", "epoch": null, "name": "python-urwid", "release": "3.el7", "source": "rpm", "version": "1.1.1" } ], "python2-blivet3": [ { "arch": "noarch", "epoch": 1, "name": "python2-blivet3", "release": "3.el7", "source": "rpm", "version": "3.1.3" } ], "python2-blockdev": [ { "arch": "x86_64", "epoch": null, "name": "python2-blockdev", "release": "5.el7", "source": "rpm", "version": "2.18" } ], "python2-bytesize": [ { "arch": "x86_64", "epoch": null, "name": "python2-bytesize", "release": "1.el7", "source": "rpm", "version": "1.2" } ], "python2-hawkey": [ { 
"arch": "x86_64", "epoch": null, "name": "python2-hawkey", "release": "2.el7_9", "source": "rpm", "version": "0.22.5" } ], "python2-libdnf": [ { "arch": "x86_64", "epoch": null, "name": "python2-libdnf", "release": "2.el7_9", "source": "rpm", "version": "0.22.5" } ], "pyxattr": [ { "arch": "x86_64", "epoch": null, "name": "pyxattr", "release": "5.el7", "source": "rpm", "version": "0.5.1" } ], "qa-tools": [ { "arch": "noarch", "epoch": null, "name": "qa-tools", "release": "4.el7_9", "source": "rpm", "version": "4.1" } ], "qemu-guest-agent": [ { "arch": "x86_64", "epoch": 10, "name": "qemu-guest-agent", "release": "3.el7", "source": "rpm", "version": "2.12.0" } ], "qrencode-libs": [ { "arch": "x86_64", "epoch": null, "name": "qrencode-libs", "release": "3.el7", "source": "rpm", "version": "3.4.1" } ], "quota": [ { "arch": "x86_64", "epoch": 1, "name": "quota", "release": "19.el7", "source": "rpm", "version": "4.01" } ], "quota-nls": [ { "arch": "noarch", "epoch": 1, "name": "quota-nls", "release": "19.el7", "source": "rpm", "version": "4.01" } ], "readline": [ { "arch": "x86_64", "epoch": null, "name": "readline", "release": "11.el7", "source": "rpm", "version": "6.2" } ], "restraint": [ { "arch": "x86_64", "epoch": null, "name": "restraint", "release": "1.el7bkr", "source": "rpm", "version": "0.4.4" } ], "restraint-rhts": [ { "arch": "x86_64", "epoch": null, "name": "restraint-rhts", "release": "1.el7bkr", "source": "rpm", "version": "0.4.4" } ], "rng-tools": [ { "arch": "x86_64", "epoch": null, "name": "rng-tools", "release": "5.el7", "source": "rpm", "version": "6.3.1" } ], "rootfiles": [ { "arch": "noarch", "epoch": null, "name": "rootfiles", "release": "11.el7", "source": "rpm", "version": "8.1" } ], "rpcbind": [ { "arch": "x86_64", "epoch": null, "name": "rpcbind", "release": "49.el7", "source": "rpm", "version": "0.2.0" } ], "rpm": [ { "arch": "x86_64", "epoch": null, "name": "rpm", "release": "48.el7_9", "source": "rpm", "version": "4.11.3" } ], "rpm-build-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-build-libs", "release": "48.el7_9", "source": "rpm", "version": "4.11.3" } ], "rpm-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-libs", "release": "48.el7_9", "source": "rpm", "version": "4.11.3" } ], "rpm-python": [ { "arch": "x86_64", "epoch": null, "name": "rpm-python", "release": "48.el7_9", "source": "rpm", "version": "4.11.3" } ], "rsync": [ { "arch": "x86_64", "epoch": null, "name": "rsync", "release": "12.el7_9", "source": "rpm", "version": "3.1.2" } ], "rsyslog": [ { "arch": "x86_64", "epoch": null, "name": "rsyslog", "release": "57.el7_9.3", "source": "rpm", "version": "8.24.0" } ], "ruby": [ { "arch": "x86_64", "epoch": null, "name": "ruby", "release": "39.el7_9", "source": "rpm", "version": "2.0.0.648" } ], "ruby-irb": [ { "arch": "noarch", "epoch": null, "name": "ruby-irb", "release": "39.el7_9", "source": "rpm", "version": "2.0.0.648" } ], "ruby-libs": [ { "arch": "x86_64", "epoch": null, "name": "ruby-libs", "release": "39.el7_9", "source": "rpm", "version": "2.0.0.648" } ], "rubygem-bigdecimal": [ { "arch": "x86_64", "epoch": null, "name": "rubygem-bigdecimal", "release": "39.el7_9", "source": "rpm", "version": "1.2.0" } ], "rubygem-io-console": [ { "arch": "x86_64", "epoch": null, "name": "rubygem-io-console", "release": "39.el7_9", "source": "rpm", "version": "0.4.2" } ], "rubygem-json": [ { "arch": "x86_64", "epoch": null, "name": "rubygem-json", "release": "39.el7_9", "source": "rpm", "version": "1.7.7" } ], "rubygem-psych": [ { "arch": 
"x86_64", "epoch": null, "name": "rubygem-psych", "release": "39.el7_9", "source": "rpm", "version": "2.0.0" } ], "rubygem-rdoc": [ { "arch": "noarch", "epoch": null, "name": "rubygem-rdoc", "release": "39.el7_9", "source": "rpm", "version": "4.0.0" } ], "rubygems": [ { "arch": "noarch", "epoch": null, "name": "rubygems", "release": "39.el7_9", "source": "rpm", "version": "2.0.14.1" } ], "sed": [ { "arch": "x86_64", "epoch": null, "name": "sed", "release": "7.el7", "source": "rpm", "version": "4.2.2" } ], "selinux-policy": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy", "release": "268.el7_9.2", "source": "rpm", "version": "3.13.1" } ], "selinux-policy-targeted": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy-targeted", "release": "268.el7_9.2", "source": "rpm", "version": "3.13.1" } ], "setools-libs": [ { "arch": "x86_64", "epoch": null, "name": "setools-libs", "release": "4.el7", "source": "rpm", "version": "3.3.8" } ], "setup": [ { "arch": "noarch", "epoch": null, "name": "setup", "release": "11.el7", "source": "rpm", "version": "2.8.71" } ], "sg3_utils": [ { "arch": "x86_64", "epoch": 1, "name": "sg3_utils", "release": "19.el7", "source": "rpm", "version": "1.37" } ], "sg3_utils-libs": [ { "arch": "x86_64", "epoch": 1, "name": "sg3_utils-libs", "release": "19.el7", "source": "rpm", "version": "1.37" } ], "sgpio": [ { "arch": "x86_64", "epoch": null, "name": "sgpio", "release": "13.el7", "source": "rpm", "version": "1.2.0.10" } ], "shadow-utils": [ { "arch": "x86_64", "epoch": 2, "name": "shadow-utils", "release": "5.el7", "source": "rpm", "version": "4.6" } ], "shared-mime-info": [ { "arch": "x86_64", "epoch": null, "name": "shared-mime-info", "release": "5.el7", "source": "rpm", "version": "1.8" } ], "slang": [ { "arch": "x86_64", "epoch": null, "name": "slang", "release": "11.el7", "source": "rpm", "version": "2.2.4" } ], "snappy": [ { "arch": "x86_64", "epoch": null, "name": "snappy", "release": "3.el7", "source": "rpm", "version": "1.1.0" } ], "sqlite": [ { "arch": "x86_64", "epoch": null, "name": "sqlite", "release": "8.el7_7.1", "source": "rpm", "version": "3.7.17" } ], "strace": [ { "arch": "x86_64", "epoch": null, "name": "strace", "release": "7.el7_9", "source": "rpm", "version": "4.24" } ], "sudo": [ { "arch": "x86_64", "epoch": null, "name": "sudo", "release": "10.el7_9.3", "source": "rpm", "version": "1.8.23" } ], "systemd": [ { "arch": "x86_64", "epoch": null, "name": "systemd", "release": "78.el7_9.9", "source": "rpm", "version": "219" } ], "systemd-libs": [ { "arch": "x86_64", "epoch": null, "name": "systemd-libs", "release": "78.el7_9.9", "source": "rpm", "version": "219" } ], "systemd-sysv": [ { "arch": "x86_64", "epoch": null, "name": "systemd-sysv", "release": "78.el7_9.9", "source": "rpm", "version": "219" } ], "systemtap": [ { "arch": "x86_64", "epoch": null, "name": "systemtap", "release": "13.el7", "source": "rpm", "version": "4.0" } ], "systemtap-client": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-client", "release": "13.el7", "source": "rpm", "version": "4.0" } ], "systemtap-devel": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-devel", "release": "13.el7", "source": "rpm", "version": "4.0" } ], "systemtap-runtime": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-runtime", "release": "13.el7", "source": "rpm", "version": "4.0" } ], "sysvinit-tools": [ { "arch": "x86_64", "epoch": null, "name": "sysvinit-tools", "release": "14.dsf.el7", "source": "rpm", "version": "2.88" } ], "tar": [ { "arch": "x86_64", 
"epoch": 2, "name": "tar", "release": "35.el7", "source": "rpm", "version": "1.26" } ], "targetcli": [ { "arch": "noarch", "epoch": null, "name": "targetcli", "release": "1.el7_9", "source": "rpm", "version": "2.1.53" } ], "tcp_wrappers": [ { "arch": "x86_64", "epoch": null, "name": "tcp_wrappers", "release": "77.el7", "source": "rpm", "version": "7.6" } ], "tcp_wrappers-libs": [ { "arch": "x86_64", "epoch": null, "name": "tcp_wrappers-libs", "release": "77.el7", "source": "rpm", "version": "7.6" } ], "teamd": [ { "arch": "x86_64", "epoch": null, "name": "teamd", "release": "3.el7", "source": "rpm", "version": "1.29" } ], "time": [ { "arch": "x86_64", "epoch": null, "name": "time", "release": "45.el7", "source": "rpm", "version": "1.7" } ], "tps-devel": [ { "arch": "noarch", "epoch": null, "name": "tps-devel", "release": "1", "source": "rpm", "version": "2.44.50" } ], "tuned": [ { "arch": "noarch", "epoch": null, "name": "tuned", "release": "12.el7_9", "source": "rpm", "version": "2.11.0" } ], "tzdata": [ { "arch": "noarch", "epoch": null, "name": "tzdata", "release": "1.el7", "source": "rpm", "version": "2024a" } ], "unzip": [ { "arch": "x86_64", "epoch": null, "name": "unzip", "release": "24.el7_9", "source": "rpm", "version": "6.0" } ], "ustr": [ { "arch": "x86_64", "epoch": null, "name": "ustr", "release": "16.el7", "source": "rpm", "version": "1.0.4" } ], "util-linux": [ { "arch": "x86_64", "epoch": null, "name": "util-linux", "release": "65.el7_9.1", "source": "rpm", "version": "2.23.2" } ], "vim-common": [ { "arch": "x86_64", "epoch": 2, "name": "vim-common", "release": "8.el7_9", "source": "rpm", "version": "7.4.629" } ], "vim-enhanced": [ { "arch": "x86_64", "epoch": 2, "name": "vim-enhanced", "release": "8.el7_9", "source": "rpm", "version": "7.4.629" } ], "vim-filesystem": [ { "arch": "x86_64", "epoch": 2, "name": "vim-filesystem", "release": "8.el7_9", "source": "rpm", "version": "7.4.629" } ], "vim-minimal": [ { "arch": "x86_64", "epoch": 2, "name": "vim-minimal", "release": "8.el7_9", "source": "rpm", "version": "7.4.629" } ], "virt-what": [ { "arch": "x86_64", "epoch": null, "name": "virt-what", "release": "4.el7_9.1", "source": "rpm", "version": "1.18" } ], "volume_key-libs": [ { "arch": "x86_64", "epoch": null, "name": "volume_key-libs", "release": "9.el7", "source": "rpm", "version": "0.3.9" } ], "wget": [ { "arch": "x86_64", "epoch": null, "name": "wget", "release": "18.el7_6.1", "source": "rpm", "version": "1.14" } ], "which": [ { "arch": "x86_64", "epoch": null, "name": "which", "release": "7.el7", "source": "rpm", "version": "2.20" } ], "wpa_supplicant": [ { "arch": "x86_64", "epoch": 1, "name": "wpa_supplicant", "release": "12.el7_9.2", "source": "rpm", "version": "2.6" } ], "xfsprogs": [ { "arch": "x86_64", "epoch": null, "name": "xfsprogs", "release": "22.el7", "source": "rpm", "version": "4.5.0" } ], "xz": [ { "arch": "x86_64", "epoch": null, "name": "xz", "release": "2.el7_9", "source": "rpm", "version": "5.2.2" } ], "xz-libs": [ { "arch": "x86_64", "epoch": null, "name": "xz-libs", "release": "2.el7_9", "source": "rpm", "version": "5.2.2" } ], "yum": [ { "arch": "noarch", "epoch": null, "name": "yum", "release": "168.el7.centos", "source": "rpm", "version": "3.4.3" } ], "yum-metadata-parser": [ { "arch": "x86_64", "epoch": null, "name": "yum-metadata-parser", "release": "10.el7", "source": "rpm", "version": "1.1.4" } ], "yum-plugin-fastestmirror": [ { "arch": "noarch", "epoch": null, "name": "yum-plugin-fastestmirror", "release": "54.el7_8", "source": "rpm", 
"version": "1.1.31" } ], "yum-utils": [ { "arch": "noarch", "epoch": null, "name": "yum-utils", "release": "54.el7_8", "source": "rpm", "version": "1.1.31" } ], "zip": [ { "arch": "x86_64", "epoch": null, "name": "zip", "release": "11.el7", "source": "rpm", "version": "3.0" } ], "zlib": [ { "arch": "x86_64", "epoch": null, "name": "zlib", "release": "21.el7_9", "source": "rpm", "version": "1.2.7" } ] } }, "changed": false } TASK [Set blivet package name] ************************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:352 Wednesday 30 July 2025 21:41:24 -0400 (0:00:01.706) 0:14:15.751 ******** ok: [managed-node13] => { "ansible_facts": { "blivet_pkg_name": [ "python2-blivet3" ] }, "changed": false } TASK [Set blivet package version] ********************************************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:356 Wednesday 30 July 2025 21:41:24 -0400 (0:00:00.097) 0:14:15.848 ******** ok: [managed-node13] => { "ansible_facts": { "blivet_pkg_version": "3.1.3-3.el7" }, "changed": false } TASK [Set distribution version] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:360 Wednesday 30 July 2025 21:41:24 -0400 (0:00:00.095) 0:14:15.944 ******** ok: [managed-node13] => { "ansible_facts": { "is_fedora": false, "is_rhel10": false, "is_rhel8": false, "is_rhel9": false }, "changed": false } TASK [Create one LVM logical volume under one volume group with size 5g] ******* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:373 Wednesday 30 July 2025 21:41:24 -0400 (0:00:00.141) 0:14:16.085 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify role results - 15] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:389 Wednesday 30 July 2025 21:41:24 -0400 (0:00:00.070) 0:14:16.156 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Change volume_size to after size 9g] ************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:392 Wednesday 30 July 2025 21:41:24 -0400 (0:00:00.122) 0:14:16.278 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify role results - 16] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:406 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.073) 0:14:16.352 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Test for correct handling of offline resize in safe mode] **************** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:409 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.072) 0:14:16.424 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up - 4] ************************************************************ task path: 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:427 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.072) 0:14:16.497 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify role results - 17] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:440 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.073) 0:14:16.570 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Create an LVM logical volume for XFS with size 5g] ************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:450 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.071) 0:14:16.642 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify role results - 18] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:466 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.073) 0:14:16.715 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Change volume size again to 9g] ******************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:469 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.074) 0:14:16.790 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify role results - 19] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:483 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.083) 0:14:16.873 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Repeat for idempotency test] ********************************************* task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:486 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.075) 0:14:16.948 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify role results - 20] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:500 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.071) 0:14:17.020 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Test for correct handling of shrinking (not supported by XFS)] *********** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:503 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.068) 0:14:17.089 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Try to resize LVM volume to disk size - 1.5 % (a deviation of less than 2 % from the minimum size should be tolerated)] *** task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:521 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.072) 0:14:17.161 ******** skipping: [managed-node13] => { "changed": false,
"skip_reason": "Conditional result was False" } TASK [Verify role results - 21] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:536 Wednesday 30 July 2025 21:41:25 -0400 (0:00:00.074) 0:14:17.235 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up - 5] ************************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:540 Wednesday 30 July 2025 21:41:26 -0400 (0:00:00.068) 0:14:17.304 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify role results - 22] ************************************************ task path: /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:553 Wednesday 30 July 2025 21:41:26 -0400 (0:00:00.069) 0:14:17.373 ******** skipping: [managed-node13] => { "changed": false, "skip_reason": "Conditional result was False" } META: ran handlers META: ran handlers PLAY RECAP ********************************************************************* managed-node13 : ok=1441 changed=26 unreachable=0 failed=3 skipped=1631 rescued=3 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.9.27", "end_time": "2025-07-31T01:34:43.236749Z", "host": "managed-node13", "message": "volume 'test1' cannot be resized to '12 GiB'", "start_time": "2025-07-31T01:34:38.121763Z", "task_name": "Manage the pools and volumes to match the specified state", "task_path": "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70" }, { "ansible_version": "2.9.27", "end_time": "2025-07-31T01:34:43.627307Z", "host": "managed-node13", "message": { "_ansible_no_log": false, "actions": [], "changed": false, "crypts": [], "failed": true, "invocation": { "module_args": { "disklabel_type": null, "diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "packages_only": false, "pool_defaults": { "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "grow_to_fill": false, "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [] }, "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, 
"mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "12884901888.0", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "safe_mode": false, "use_partitions": null, "uses_kmod_kvdo": true, "volume_defaults": { "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 0, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, "volumes": [] } }, "leaves": [], "mounts": [], "msg": "volume 'test1' cannot be resized to '12 GiB'", "packages": [], "pools": [], "volumes": [] }, "start_time": "2025-07-31T01:34:43.361714Z", "task_name": "Failed message", "task_path": "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:111" }, { "ansible_version": "2.9.27", "end_time": "2025-07-31T01:37:29.197675Z", "host": "managed-node13", "message": "invalid size specification 'xyz GiB' in pool 'foo'", "start_time": "2025-07-31T01:37:24.350080Z", "task_name": "Manage the pools and volumes to match the specified state", "task_path": "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70" }, { "ansible_version": "2.9.27", "end_time": "2025-07-31T01:37:29.424261Z", "host": "managed-node13", "message": { "_ansible_no_log": false, "actions": [], "changed": false, "crypts": [], "failed": true, "invocation": { "module_args": { "disklabel_type": null, "diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "packages_only": false, "pool_defaults": { "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "grow_to_fill": false, "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [] }, "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "cache_devices": [], "cache_mode": null, "cache_size": null, "cached": null, "compression": null, "deduplication": null, "encryption": null, 
"encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": null, "fs_label": null, "fs_type": "ext4", "mount_group": null, "mount_mode": null, "mount_options": null, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_disks": [], "raid_level": null, "raid_stripe_size": null, "size": "xyz GiB", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": null, "vdo_pool_size": null } ] } ], "safe_mode": false, "use_partitions": null, "uses_kmod_kvdo": true, "volume_defaults": { "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 0, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, "volumes": [] } }, "leaves": [], "mounts": [], "msg": "invalid size specification 'xyz GiB' in pool 'foo'", "packages": [], "pools": [], "volumes": [] }, "start_time": "2025-07-31T01:37:29.308151Z", "task_name": "Failed message", "task_path": "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:111" }, { "ansible_version": "2.9.27", "end_time": "2025-07-31T01:37:40.005954Z", "host": "managed-node13", "message": "invalid size specification 'none' in pool 'foo'", "start_time": "2025-07-31T01:37:35.121529Z", "task_name": "Manage the pools and volumes to match the specified state", "task_path": "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70" }, { "ansible_version": "2.9.27", "end_time": "2025-07-31T01:37:40.195556Z", "host": "managed-node13", "message": { "_ansible_no_log": false, "actions": [], "changed": false, "crypts": [], "failed": true, "invocation": { "module_args": { "disklabel_type": null, "diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "packages_only": false, "pool_defaults": { "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "grow_to_fill": false, "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [] }, "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "cache_devices": [], "cache_mode": null, "cache_size": null, 
"cached": null, "compression": null, "deduplication": null, "encryption": null, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": null, "fs_label": null, "fs_type": "ext4", "mount_group": null, "mount_mode": null, "mount_options": null, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_disks": [], "raid_level": null, "raid_stripe_size": null, "size": "none", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": null, "vdo_pool_size": null } ] } ], "safe_mode": false, "use_partitions": null, "uses_kmod_kvdo": true, "volume_defaults": { "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": 0, "state": "present", "thin": null, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, "volumes": [] } }, "leaves": [], "mounts": [], "msg": "invalid size specification 'none' in pool 'foo'", "packages": [], "pools": [], "volumes": [] }, "start_time": "2025-07-31T01:37:40.071911Z", "task_name": "Failed message", "task_path": "/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:111" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Wednesday 30 July 2025 21:41:26 -0400 (0:00:00.025) 0:14:17.399 ******** =============================================================================== fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 7.47s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 7.15s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 6.28s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 6.23s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.61s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Gathering Facts --------------------------------------------------------- 5.59s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/tests/storage/tests_resize.yml:2 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.47s 
/tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.39s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.25s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.24s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.15s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Make sure blivet is available ------- 5.14s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.12s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.96s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.95s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.79s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.79s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.79s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab --- 4.68s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.64s /tmp/collections-iml/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
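
For context on the "Set blivet package name" and "Set blivet package version" results earlier in the log: both facts are derived from the package facts gathered during the run. Below is a minimal sketch of that derivation, assuming the same fact layout as the package dump above (ansible_facts.packages maps each package name to a list of dicts with "version" and "release" keys); the task names here are illustrative, not taken from tests_resize.yml.

    # Illustrative sketch; task names assumed, fact layout as in the dump above.
    - name: Gather installed package facts
      package_facts:
        manager: auto

    - name: Derive the blivet package version from the gathered facts
      set_fact:
        blivet_pkg_version: "{{ ansible_facts.packages['python2-blivet3'][0].version }}-{{ ansible_facts.packages['python2-blivet3'][0].release }}"

On this host the expression evaluates to "3.1.3-3.el7", matching the logged value.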
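
The first failure recorded in the SYSTEM ROLES ERRORS block ("volume 'test1' cannot be resized to '12 GiB'") corresponds to the module_args captured there: pool "foo" on disk "sda", volume "test1" requested at 12 GiB (12884901888 bytes). A minimal sketch of a play that would trigger the same error, assuming the backing disk is smaller than 12 GiB (the disk size is not shown in this excerpt):

    # Illustrative sketch; assumes sda is smaller than the requested 12 GiB.
    - hosts: managed-node13
      tasks:
        - name: Request a volume size larger than the pool can provide
          include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                disks: ["sda"]
                volumes:
                  - name: test1
                    size: "12 GiB"
                    fs_type: ext4
                    mount_point: /opt/test1

Blivet refuses the resize, most likely because the requested size exceeds what the volume group can back, and the role surfaces that as the "cannot be resized" failure seen above.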
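
The remaining failures ("invalid size specification 'xyz GiB'" and "invalid size specification 'none'") come from the role's input validation rejecting unparseable size strings, and the recap's failed=3/rescued=3 suggests the test expects and catches these failures. A sketch of one way to write such a negative test; the task names and rescue mechanics are assumed rather than taken from tests_resize.yml, whose actual implementation may differ.

    # Illustrative sketch; names and rescue mechanics assumed.
    - name: Verify that an unparseable size is rejected
      block:
        - name: Try to create test1 with an invalid size
          include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                disks: ["sda"]
                volumes:
                  - name: test1
                    size: "xyz GiB"
                    fs_type: ext4
                    mount_point: /opt/test1
        - name: Fail if the role accepted the invalid size
          fail:
            msg: Expected the role to reject 'xyz GiB'
      rescue:
        - name: Confirm the expected error message
          assert:
            that: "'invalid size specification' in ansible_failed_result.msg"

Inside the rescue section, ansible_failed_result holds the result of the failing task, so the assert checks the same message that appears in the error summary above.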