ansible-playbook 2.9.27
  config file = /etc/ansible/ansible.cfg
  configured module search path = [u'/root/.ansible/plugins/modules', u'/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python2.7/site-packages/ansible
  executable location = /usr/bin/ansible-playbook
  python version = 2.7.5 (default, Nov 14 2023, 16:14:06) [GCC 4.8.5 20150623 (Red Hat 4.8.5-44)]
Using /etc/ansible/ansible.cfg as config file
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_change_fs.yml **************************************************
1 plays in /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml

PLAY [Test change fs] **********************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:2
Sunday 06 July 2025  11:56:03 -0400 (0:00:00.237)       0:00:00.237 ***********
ok: [managed-node2]
META: ran handlers

TASK [Run the role] ************************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:14
Sunday 06 July 2025  11:56:06 -0400 (0:00:02.889)       0:00:03.127 ***********

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Sunday 06 July 2025  11:56:06 -0400 (0:00:00.229)       0:00:03.357 ***********
included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Sunday 06 July 2025  11:56:07 -0400 (0:00:00.609)       0:00:03.966 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Sunday 06 July 2025  11:56:07 -0400 (0:00:00.282)       0:00:04.249 ***********
skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false,
"item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Sunday 06 July 2025 11:56:08 -0400 (0:00:00.401) 0:00:04.651 *********** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Sunday 06 July 2025 11:56:09 -0400 (0:00:01.101) 0:00:05.752 *********** ok: [managed-node2] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Sunday 06 July 2025 11:56:09 -0400 (0:00:00.220) 0:00:05.973 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Sunday 06 July 2025 11:56:09 -0400 (0:00:00.159) 0:00:06.132 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Sunday 06 July 2025 11:56:09 -0400 (0:00:00.151) 0:00:06.284 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Sunday 06 July 2025 11:56:10 -0400 (0:00:00.633) 0:00:06.917 *********** changed: [managed-node2] => { "changed": true, "changes": { "installed": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "libblockdev" ] }, "rc": 0, "results": [ "Loaded plugins: fastestmirror\nLoading mirror speeds from cached hostfile\n * epel: d2lzkl7pfhq30w.cloudfront.net\n * epel-debuginfo: 
d2lzkl7pfhq30w.cloudfront.net\n * epel-source: d2lzkl7pfhq30w.cloudfront.net\nResolving Dependencies\n--> Running transaction check\n---> Package libblockdev.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: libblockdev-utils(x86-64) = 2.18-5.el7 for package: libblockdev-2.18-5.el7.x86_64\n--> Processing Dependency: libbd_utils.so.2()(64bit) for package: libblockdev-2.18-5.el7.x86_64\n---> Package libblockdev-crypto.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: libvolume_key.so.1()(64bit) for package: libblockdev-crypto-2.18-5.el7.x86_64\n---> Package libblockdev-dm.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: libdmraid.so.1(Base)(64bit) for package: libblockdev-dm-2.18-5.el7.x86_64\n--> Processing Dependency: dmraid for package: libblockdev-dm-2.18-5.el7.x86_64\n--> Processing Dependency: libdmraid.so.1()(64bit) for package: libblockdev-dm-2.18-5.el7.x86_64\n---> Package libblockdev-lvm.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: lvm2 for package: libblockdev-lvm-2.18-5.el7.x86_64\n--> Processing Dependency: device-mapper-persistent-data for package: libblockdev-lvm-2.18-5.el7.x86_64\n---> Package libblockdev-mdraid.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: mdadm for package: libblockdev-mdraid-2.18-5.el7.x86_64\n--> Processing Dependency: libbytesize.so.1()(64bit) for package: libblockdev-mdraid-2.18-5.el7.x86_64\n---> Package libblockdev-swap.x86_64 0:2.18-5.el7 will be installed\n---> Package python-enum34.noarch 0:1.0.4-1.el7 will be installed\n---> Package python2-blivet3.noarch 1:3.1.3-3.el7 will be installed\n--> Processing Dependency: blivet3-data = 1:3.1.3-3.el7 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: python2-bytesize >= 0.3 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: python2-blockdev >= 2.17 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: pyparted >= 3.9 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: python2-hawkey for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: lsof for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Running transaction check\n---> Package blivet3-data.noarch 1:3.1.3-3.el7 will be installed\n---> Package device-mapper-persistent-data.x86_64 0:0.8.5-3.el7_9.2 will be installed\n--> Processing Dependency: libaio.so.1(LIBAIO_0.4)(64bit) for package: device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64\n--> Processing Dependency: libaio.so.1(LIBAIO_0.1)(64bit) for package: device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64\n--> Processing Dependency: libaio.so.1()(64bit) for package: device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64\n---> Package dmraid.x86_64 0:1.0.0.rc16-28.el7 will be installed\n--> Processing Dependency: libdevmapper-event.so.1.02(Base)(64bit) for package: dmraid-1.0.0.rc16-28.el7.x86_64\n--> Processing Dependency: dmraid-events for package: dmraid-1.0.0.rc16-28.el7.x86_64\n--> Processing Dependency: libdevmapper-event.so.1.02()(64bit) for package: dmraid-1.0.0.rc16-28.el7.x86_64\n---> Package libblockdev-utils.x86_64 0:2.18-5.el7 will be installed\n---> Package libbytesize.x86_64 0:1.2-1.el7 will be installed\n---> Package lsof.x86_64 0:4.87-6.el7 will be installed\n---> Package lvm2.x86_64 7:2.02.187-6.el7_9.5 will be installed\n--> Processing Dependency: lvm2-libs = 7:2.02.187-6.el7_9.5 for package: 7:lvm2-2.02.187-6.el7_9.5.x86_64\n--> Processing Dependency: 
liblvm2app.so.2.2(Base)(64bit) for package: 7:lvm2-2.02.187-6.el7_9.5.x86_64\n--> Processing Dependency: liblvm2app.so.2.2()(64bit) for package: 7:lvm2-2.02.187-6.el7_9.5.x86_64\n---> Package mdadm.x86_64 0:4.1-9.el7_9 will be installed\n--> Processing Dependency: libreport-filesystem for package: mdadm-4.1-9.el7_9.x86_64\n---> Package pyparted.x86_64 1:3.9-15.el7 will be installed\n---> Package python2-blockdev.x86_64 0:2.18-5.el7 will be installed\n---> Package python2-bytesize.x86_64 0:1.2-1.el7 will be installed\n---> Package python2-hawkey.x86_64 0:0.22.5-2.el7_9 will be installed\n--> Processing Dependency: python2-libdnf = 0.22.5-2.el7_9 for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: libdnf(x86-64) = 0.22.5-2.el7_9 for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: libsolvext.so.0(SOLV_1.0)(64bit) for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: libsolv.so.0(SOLV_1.0)(64bit) for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: libsolvext.so.0()(64bit) for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: libsolv.so.0()(64bit) for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: librepo.so.0()(64bit) for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: libmodulemd.so.1()(64bit) for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n--> Processing Dependency: libdnf.so.2()(64bit) for package: python2-hawkey-0.22.5-2.el7_9.x86_64\n---> Package volume_key-libs.x86_64 0:0.3.9-9.el7 will be installed\n--> Running transaction check\n---> Package device-mapper-event-libs.x86_64 7:1.02.170-6.el7_9.5 will be installed\n---> Package dmraid-events.x86_64 0:1.0.0.rc16-28.el7 will be installed\n--> Processing Dependency: sgpio for package: dmraid-events-1.0.0.rc16-28.el7.x86_64\n--> Processing Dependency: device-mapper-event for package: dmraid-events-1.0.0.rc16-28.el7.x86_64\n---> Package libaio.x86_64 0:0.3.109-13.el7 will be installed\n---> Package libdnf.x86_64 0:0.22.5-2.el7_9 will be installed\n---> Package libmodulemd.x86_64 0:1.6.3-1.el7 will be installed\n---> Package librepo.x86_64 0:1.8.1-8.el7_9 will be installed\n---> Package libreport-filesystem.x86_64 0:2.1.11-53.el7.centos will be installed\n---> Package libsolv.x86_64 0:0.6.34-4.el7 will be installed\n---> Package lvm2-libs.x86_64 7:2.02.187-6.el7_9.5 will be installed\n---> Package python2-libdnf.x86_64 0:0.22.5-2.el7_9 will be installed\n--> Running transaction check\n---> Package device-mapper-event.x86_64 7:1.02.170-6.el7_9.5 will be installed\n---> Package sgpio.x86_64 0:1.2.0.10-13.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository\n Size\n================================================================================\nInstalling:\n libblockdev x86_64 2.18-5.el7 base 119 k\n libblockdev-crypto x86_64 2.18-5.el7 base 60 k\n libblockdev-dm x86_64 2.18-5.el7 base 54 k\n libblockdev-lvm x86_64 2.18-5.el7 base 62 k\n libblockdev-mdraid x86_64 2.18-5.el7 base 57 k\n libblockdev-swap x86_64 2.18-5.el7 base 52 k\n python-enum34 noarch 1.0.4-1.el7 base 52 k\n python2-blivet3 noarch 1:3.1.3-3.el7 base 851 k\nInstalling for dependencies:\n blivet3-data noarch 1:3.1.3-3.el7 base 77 k\n device-mapper-event x86_64 7:1.02.170-6.el7_9.5 updates 192 k\n device-mapper-event-libs x86_64 7:1.02.170-6.el7_9.5 updates 
192 k\n device-mapper-persistent-data x86_64 0.8.5-3.el7_9.2 updates 423 k\n dmraid x86_64 1.0.0.rc16-28.el7 base 151 k\n dmraid-events x86_64 1.0.0.rc16-28.el7 base 21 k\n libaio x86_64 0.3.109-13.el7 base 24 k\n libblockdev-utils x86_64 2.18-5.el7 base 58 k\n libbytesize x86_64 1.2-1.el7 base 52 k\n libdnf x86_64 0.22.5-2.el7_9 extras 535 k\n libmodulemd x86_64 1.6.3-1.el7 extras 141 k\n librepo x86_64 1.8.1-8.el7_9 updates 82 k\n libreport-filesystem x86_64 2.1.11-53.el7.centos base 41 k\n libsolv x86_64 0.6.34-4.el7 base 329 k\n lsof x86_64 4.87-6.el7 base 331 k\n lvm2 x86_64 7:2.02.187-6.el7_9.5 updates 1.3 M\n lvm2-libs x86_64 7:2.02.187-6.el7_9.5 updates 1.1 M\n mdadm x86_64 4.1-9.el7_9 updates 439 k\n pyparted x86_64 1:3.9-15.el7 base 195 k\n python2-blockdev x86_64 2.18-5.el7 base 61 k\n python2-bytesize x86_64 1.2-1.el7 base 22 k\n python2-hawkey x86_64 0.22.5-2.el7_9 extras 71 k\n python2-libdnf x86_64 0.22.5-2.el7_9 extras 611 k\n sgpio x86_64 1.2.0.10-13.el7 base 13 k\n volume_key-libs x86_64 0.3.9-9.el7 base 141 k\n\nTransaction Summary\n================================================================================\nInstall 8 Packages (+25 Dependent packages)\n\nTotal download size: 7.8 M\nInstalled size: 23 M\nDownloading packages:\n--------------------------------------------------------------------------------\nTotal 6.5 MB/s | 7.8 MB 00:01 \nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : libblockdev-utils-2.18-5.el7.x86_64 1/33 \n Installing : 7:device-mapper-event-libs-1.02.170-6.el7_9.5.x86_64 2/33 \n Installing : libsolv-0.6.34-4.el7.x86_64 3/33 \n Installing : libaio-0.3.109-13.el7.x86_64 4/33 \n Installing : librepo-1.8.1-8.el7_9.x86_64 5/33 \n Installing : libmodulemd-1.6.3-1.el7.x86_64 6/33 \n Installing : libdnf-0.22.5-2.el7_9.x86_64 7/33 \n Installing : device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64 8/33 \n Installing : 7:device-mapper-event-1.02.170-6.el7_9.5.x86_64 9/33 \n Installing : libbytesize-1.2-1.el7.x86_64 10/33 \n Installing : python2-bytesize-1.2-1.el7.x86_64 11/33 \n Installing : 7:lvm2-libs-2.02.187-6.el7_9.5.x86_64 12/33 \n Installing : 7:lvm2-2.02.187-6.el7_9.5.x86_64 13/33 \n Installing : python2-libdnf-0.22.5-2.el7_9.x86_64 14/33 \n Installing : python2-hawkey-0.22.5-2.el7_9.x86_64 15/33 \n Installing : libblockdev-2.18-5.el7.x86_64 16/33 \n Installing : python2-blockdev-2.18-5.el7.x86_64 17/33 \n Installing : 1:pyparted-3.9-15.el7.x86_64 18/33 \n Installing : sgpio-1.2.0.10-13.el7.x86_64 19/33 \n Installing : dmraid-1.0.0.rc16-28.el7.x86_64 20/33 \n Installing : dmraid-events-1.0.0.rc16-28.el7.x86_64 21/33 \n Installing : volume_key-libs-0.3.9-9.el7.x86_64 22/33 \n Installing : libreport-filesystem-2.1.11-53.el7.centos.x86_64 23/33 \n Installing : mdadm-4.1-9.el7_9.x86_64 24/33 \n Installing : 1:blivet3-data-3.1.3-3.el7.noarch 25/33 \n Installing : lsof-4.87-6.el7.x86_64 26/33 \n Installing : 1:python2-blivet3-3.1.3-3.el7.noarch 27/33 \n Installing : libblockdev-mdraid-2.18-5.el7.x86_64 28/33 \n Installing : libblockdev-crypto-2.18-5.el7.x86_64 29/33 \n Installing : libblockdev-dm-2.18-5.el7.x86_64 30/33 \n Installing : libblockdev-lvm-2.18-5.el7.x86_64 31/33 \n Installing : libblockdev-swap-2.18-5.el7.x86_64 32/33 \n Installing : python-enum34-1.0.4-1.el7.noarch 33/33 \n Verifying : 7:device-mapper-event-1.02.170-6.el7_9.5.x86_64 1/33 \n Verifying : libblockdev-swap-2.18-5.el7.x86_64 2/33 \n Verifying : libblockdev-lvm-2.18-5.el7.x86_64 3/33 \n Verifying : 
lsof-4.87-6.el7.x86_64 4/33 \n Verifying : libblockdev-mdraid-2.18-5.el7.x86_64 5/33 \n Verifying : libdnf-0.22.5-2.el7_9.x86_64 6/33 \n Verifying : python-enum34-1.0.4-1.el7.noarch 7/33 \n Verifying : 1:blivet3-data-3.1.3-3.el7.noarch 8/33 \n Verifying : dmraid-events-1.0.0.rc16-28.el7.x86_64 9/33 \n Verifying : python2-blockdev-2.18-5.el7.x86_64 10/33 \n Verifying : libmodulemd-1.6.3-1.el7.x86_64 11/33 \n Verifying : librepo-1.8.1-8.el7_9.x86_64 12/33 \n Verifying : libblockdev-dm-2.18-5.el7.x86_64 13/33 \n Verifying : libaio-0.3.109-13.el7.x86_64 14/33 \n Verifying : libreport-filesystem-2.1.11-53.el7.centos.x86_64 15/33 \n Verifying : 7:lvm2-libs-2.02.187-6.el7_9.5.x86_64 16/33 \n Verifying : python2-hawkey-0.22.5-2.el7_9.x86_64 17/33 \n Verifying : python2-bytesize-1.2-1.el7.x86_64 18/33 \n Verifying : libblockdev-2.18-5.el7.x86_64 19/33 \n Verifying : libbytesize-1.2-1.el7.x86_64 20/33 \n Verifying : 7:device-mapper-event-libs-1.02.170-6.el7_9.5.x86_64 21/33 \n Verifying : python2-libdnf-0.22.5-2.el7_9.x86_64 22/33 \n Verifying : 7:lvm2-2.02.187-6.el7_9.5.x86_64 23/33 \n Verifying : libblockdev-utils-2.18-5.el7.x86_64 24/33 \n Verifying : volume_key-libs-0.3.9-9.el7.x86_64 25/33 \n Verifying : libsolv-0.6.34-4.el7.x86_64 26/33 \n Verifying : device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64 27/33 \n Verifying : 1:python2-blivet3-3.1.3-3.el7.noarch 28/33 \n Verifying : dmraid-1.0.0.rc16-28.el7.x86_64 29/33 \n Verifying : mdadm-4.1-9.el7_9.x86_64 30/33 \n Verifying : sgpio-1.2.0.10-13.el7.x86_64 31/33 \n Verifying : libblockdev-crypto-2.18-5.el7.x86_64 32/33 \n Verifying : 1:pyparted-3.9-15.el7.x86_64 33/33 \n\nInstalled:\n libblockdev.x86_64 0:2.18-5.el7 libblockdev-crypto.x86_64 0:2.18-5.el7\n libblockdev-dm.x86_64 0:2.18-5.el7 libblockdev-lvm.x86_64 0:2.18-5.el7 \n libblockdev-mdraid.x86_64 0:2.18-5.el7 libblockdev-swap.x86_64 0:2.18-5.el7 \n python-enum34.noarch 0:1.0.4-1.el7 python2-blivet3.noarch 1:3.1.3-3.el7 \n\nDependency Installed:\n blivet3-data.noarch 1:3.1.3-3.el7 \n device-mapper-event.x86_64 7:1.02.170-6.el7_9.5 \n device-mapper-event-libs.x86_64 7:1.02.170-6.el7_9.5 \n device-mapper-persistent-data.x86_64 0:0.8.5-3.el7_9.2 \n dmraid.x86_64 0:1.0.0.rc16-28.el7 \n dmraid-events.x86_64 0:1.0.0.rc16-28.el7 \n libaio.x86_64 0:0.3.109-13.el7 \n libblockdev-utils.x86_64 0:2.18-5.el7 \n libbytesize.x86_64 0:1.2-1.el7 \n libdnf.x86_64 0:0.22.5-2.el7_9 \n libmodulemd.x86_64 0:1.6.3-1.el7 \n librepo.x86_64 0:1.8.1-8.el7_9 \n libreport-filesystem.x86_64 0:2.1.11-53.el7.centos \n libsolv.x86_64 0:0.6.34-4.el7 \n lsof.x86_64 0:4.87-6.el7 \n lvm2.x86_64 7:2.02.187-6.el7_9.5 \n lvm2-libs.x86_64 7:2.02.187-6.el7_9.5 \n mdadm.x86_64 0:4.1-9.el7_9 \n pyparted.x86_64 1:3.9-15.el7 \n python2-blockdev.x86_64 0:2.18-5.el7 \n python2-bytesize.x86_64 0:1.2-1.el7 \n python2-hawkey.x86_64 0:0.22.5-2.el7_9 \n python2-libdnf.x86_64 0:0.22.5-2.el7_9 \n sgpio.x86_64 0:1.2.0.10-13.el7 \n volume_key-libs.x86_64 0:0.3.9-9.el7 \n\nComplete!\n" ] } lsrpackages: libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Sunday 06 July 2025 11:56:21 -0400 (0:00:10.689) 0:00:17.606 *********** ok: [managed-node2] => { "storage_pools | d([])": [] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Sunday 06 July 2025 11:56:21 -0400 (0:00:00.432) 0:00:18.039 *********** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Sunday 06 July 2025 11:56:22 -0400 (0:00:00.395) 0:00:18.434 *********** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Sunday 06 July 2025 11:56:24 -0400 (0:00:02.610) 0:00:21.045 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Sunday 06 July 2025 11:56:25 -0400 (0:00:00.733) 0:00:21.779 *********** TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Sunday 06 July 2025 11:56:25 -0400 (0:00:00.321) 0:00:22.101 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Sunday 06 July 2025 11:56:25 -0400 (0:00:00.263) 0:00:22.364 *********** TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Sunday 06 July 2025 11:56:26 -0400 (0:00:00.144) 0:00:22.509 *********** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: kpartx TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Sunday 06 July 2025 11:56:28 -0400 (0:00:02.543) 0:00:25.052 *********** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": 
"plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "unknown" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": 
"systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Sunday 06 July 2025 11:56:32 -0400 (0:00:03.745) 0:00:28.798 *********** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Sunday 06 July 2025 11:56:32 -0400 (0:00:00.297) 0:00:29.096 *********** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Sunday 06 July 2025 11:56:32 -0400 (0:00:00.258) 0:00:29.354 *********** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Sunday 06 July 2025 11:56:34 -0400 (0:00:02.011) 0:00:31.365 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Sunday 06 July 2025 11:56:35 -0400 (0:00:00.408) 0:00:31.774 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751816985.9470944, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, 
"charset": "us-ascii", "checksum": "4db69458c23204aa354c1fce8c724ba0713d6623", "ctime": 1718881114.40265, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131078, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1718881114.40265, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1207, "uid": 0, "version": "18446744072852913878", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Sunday 06 July 2025 11:56:37 -0400 (0:00:01.810) 0:00:33.584 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Sunday 06 July 2025 11:56:37 -0400 (0:00:00.452) 0:00:34.037 *********** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Sunday 06 July 2025 11:56:37 -0400 (0:00:00.274) 0:00:34.311 *********** ok: [managed-node2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Sunday 06 July 2025 11:56:38 -0400 (0:00:00.451) 0:00:34.763 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Sunday 06 July 2025 11:56:38 -0400 (0:00:00.343) 0:00:35.107 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Sunday 06 July 2025 11:56:39 -0400 (0:00:00.429) 0:00:35.536 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Sunday 06 July 2025 11:56:39 -0400 (0:00:00.244) 0:00:35.780 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Sunday 06 July 2025 11:56:39 -0400 (0:00:00.528) 0:00:36.309 *********** TASK [fedora.linux_system_roles.storage : Manage 
mount ownership/permissions] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Sunday 06 July 2025 11:56:40 -0400 (0:00:00.282) 0:00:36.591 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Sunday 06 July 2025 11:56:40 -0400 (0:00:00.261) 0:00:36.852 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Sunday 06 July 2025 11:56:40 -0400 (0:00:00.343) 0:00:37.196 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817219.282967, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Sunday 06 July 2025 11:56:42 -0400 (0:00:01.490) 0:00:38.687 *********** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Sunday 06 July 2025 11:56:42 -0400 (0:00:00.160) 0:00:38.848 *********** ok: [managed-node2] TASK [Mark tasks to be skipped] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:18 Sunday 06 July 2025 11:56:45 -0400 (0:00:02.589) 0:00:41.437 *********** ok: [managed-node2] => { "ansible_facts": { "storage_skip_checks": [ "blivet_available", "packages_installed", "service_facts" ] }, "changed": false } TASK [Get unused disks] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:25 Sunday 06 July 2025 11:56:45 -0400 (0:00:00.419) 0:00:41.857 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node2 TASK [Ensure test packages] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 Sunday 06 July 2025 11:56:46 -0400 (0:00:00.640) 0:00:42.497 *********** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "util-linux-2.23.2-65.el7_9.1.x86_64 providing 
util-linux is already installed" ] } lsrpackages: util-linux TASK [Find unused disks in the system] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11 Sunday 06 July 2025 11:56:49 -0400 (0:00:03.497) 0:00:45.995 *********** ok: [managed-node2] => { "changed": false, "disks": [ "sda" ], "info": [ "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"ext4\" LOG-SEC=\"512\"", "filename [xvda1] is a partition", "Disk [/dev/xvda] attrs [{'fstype': '', 'type': 'disk', 'ssize': '512', 'size': '268435456000'}] has partitions" ] } TASK [Debug why there are no unused disks] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20 Sunday 06 July 2025 11:56:52 -0400 (0:00:03.152) 0:00:49.148 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29 Sunday 06 July 2025 11:56:53 -0400 (0:00:00.477) 0:00:49.626 *********** ok: [managed-node2] => { "ansible_facts": { "unused_disks": [ "sda" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34 Sunday 06 July 2025 11:56:53 -0400 (0:00:00.371) 0:00:49.997 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print unused disks] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39 Sunday 06 July 2025 11:56:54 -0400 (0:00:00.466) 0:00:50.464 *********** ok: [managed-node2] => { "unused_disks": [ "sda" ] } TASK [Create a LVM logical volume with default fs_type] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:31 Sunday 06 July 2025 11:56:54 -0400 (0:00:00.396) 0:00:50.860 *********** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Sunday 06 July 2025 11:56:55 -0400 (0:00:00.817) 0:00:51.678 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Sunday 06 July 2025 11:56:55 -0400 (0:00:00.628) 0:00:52.306 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Sunday 06 July 2025 11:56:56 -0400 (0:00:00.576) 0:00:52.883 *********** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Sunday 06 July 2025 11:56:57 -0400 (0:00:00.700) 0:00:53.583 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Sunday 06 July 2025 11:56:57 -0400 (0:00:00.186) 0:00:53.769 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Sunday 06 July 2025 11:56:57 -0400 (0:00:00.274) 0:00:54.044 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Sunday 06 July 2025 11:56:58 -0400 (0:00:00.489) 0:00:54.534 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK 
[fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Sunday 06 July 2025 11:56:58 -0400 (0:00:00.368) 0:00:54.902 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Sunday 06 July 2025 11:56:59 -0400 (0:00:00.734) 0:00:55.637 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Sunday 06 July 2025 11:56:59 -0400 (0:00:00.280) 0:00:55.917 *********** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Sunday 06 July 2025 11:57:00 -0400 (0:00:00.492) 0:00:56.409 *********** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Sunday 06 July 2025 11:57:00 -0400 (0:00:00.265) 0:00:56.675 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Sunday 06 July 2025 11:57:00 -0400 (0:00:00.400) 0:00:57.076 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Sunday 06 July 2025 11:57:01 -0400 (0:00:00.478) 0:00:57.555 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Sunday 06 July 2025 11:57:01 -0400 (0:00:00.320) 0:00:57.875 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Sunday 06 July 2025 11:57:02 -0400 (0:00:00.800) 0:00:58.675 *********** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: 
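The storage_pools value echoed above implies a role invocation along the following lines. The pool name, disk list, volume name, size, and mount point are taken from this log; the include_role wiring and the unused_disks variable reference are assumptions about how the test composes them.

- name: Create a LVM logical volume with default fs_type (sketch, not the test's verbatim source)
  include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_pools:
      - name: foo
        disks: "{{ unused_disks }}"   # resolves to ['sda'] in this run
        volumes:
          - name: test1
            size: 5g
            mount_point: /opt/test1   # fs_type omitted, so the role defaults to xfs here
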
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Sunday 06 July 2025 11:57:02 -0400 (0:00:00.552) 0:00:59.228 *********** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Sunday 06 July 2025 11:57:03 -0400 (0:00:00.303) 0:00:59.531 *********** changed: [managed-node2] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Sunday 06 July 2025 11:57:09 -0400 (0:00:06.288) 0:01:05.820 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Sunday 06 July 2025 11:57:09 -0400 (0:00:00.437) 0:01:06.258 *********** ok: [managed-node2] 
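The four blivet actions above (PV format on /dev/sda, VG foo, LV test1, XFS format) map onto ordinary LVM steps. The sketch below restates them with the stock lvg, lvol and filesystem modules purely for orientation; the role itself performs this work through blivet, not through these modules.

- name: Create the volume group on the selected disk (approximation of the blivet actions)
  lvg:
    vg: foo
    pvs: /dev/sda

- name: Create the 5 GiB logical volume
  lvol:
    vg: foo
    lv: test1
    size: 5g

- name: Put an XFS filesystem on the new LV
  filesystem:
    fstype: xfs
    dev: /dev/mapper/foo-test1
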
=> { "changed": false, "stat": { "atime": 1751816985.9470944, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "4db69458c23204aa354c1fce8c724ba0713d6623", "ctime": 1718881114.40265, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131078, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1718881114.40265, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1207, "uid": 0, "version": "18446744072852913878", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Sunday 06 July 2025 11:57:11 -0400 (0:00:02.028) 0:01:08.286 *********** changed: [managed-node2] => { "backup": "", "changed": true } MSG: line added TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Sunday 06 July 2025 11:57:15 -0400 (0:00:03.474) 0:01:11.761 *********** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Sunday 06 July 2025 11:57:15 -0400 (0:00:00.417) 0:01:12.178 *********** ok: [managed-node2] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, 
"encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Sunday 06 July 2025 11:57:16 -0400 (0:00:00.515) 0:01:12.694 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Sunday 06 July 2025 11:57:16 -0400 (0:00:00.378) 0:01:13.073 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Sunday 06 July 2025 11:57:17 -0400 (0:00:00.497) 0:01:13.571 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Sunday 06 July 2025 11:57:17 -0400 (0:00:00.329) 
0:01:13.901 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Sunday 06 July 2025 11:57:22 -0400 (0:00:05.049) 0:01:18.951 *********** changed: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Sunday 06 July 2025 11:57:25 -0400 (0:00:03.163) 0:01:22.115 *********** skipping: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Sunday 06 July 2025 11:57:26 -0400 (0:00:00.575) 0:01:22.691 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Sunday 06 July 2025 11:57:28 -0400 (0:00:02.082) 0:01:24.773 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817219.282967, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Sunday 06 July 
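The "Set up new/current mounts" result above carries exactly the fields the mount module takes (src, path, fstype, opts, state). A standalone equivalent of that step would look roughly like the task below; treating it as a single plain mount task is a simplification, since the role loops over its computed mounts list.

- name: Mount the new volume and record it in /etc/fstab (sketch)
  mount:
    src: /dev/mapper/foo-test1
    path: /opt/test1
    fstype: xfs
    opts: defaults
    state: mounted    # mounts it now and adds the fstab entry
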
2025 11:57:30 -0400 (0:00:01.833) 0:01:26.607 *********** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Sunday 06 July 2025 11:57:30 -0400 (0:00:00.504) 0:01:27.112 *********** ok: [managed-node2] TASK [Verify role results] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:43 Sunday 06 July 2025 11:57:33 -0400 (0:00:02.800) 0:01:29.913 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2 TASK [Print out pool information] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Sunday 06 July 2025 11:57:34 -0400 (0:00:01.003) 0:01:30.916 *********** ok: [managed-node2] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Sunday 06 July 2025 11:57:35 -0400 (0:00:00.521) 0:01:31.438 *********** skipping: [managed-node2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Sunday 06 July 2025 11:57:35 -0400 (0:00:00.409) 0:01:31.848 *********** ok: [managed-node2] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "01cf4af9-d439-40f4-b908-4e9a581e4eed" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "vvI2TT-wUCo-l5pF-mSH6-RUks-12mX-gowx2Y" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Sunday 06 July 2025 11:57:38 -0400 (0:00:03.040) 0:01:34.888 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002720", "end": "2025-07-06 11:57:41.053652", "rc": 0, "start": "2025-07-06 11:57:41.050932" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Sunday 06 July 2025 11:57:41 -0400 (0:00:03.008) 0:01:37.897 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003387", "end": "2025-07-06 11:57:42.767914", "failed_when_result": false, "rc": 0, "start": "2025-07-06 11:57:42.764527" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Sunday 06 July 2025 11:57:43 -0400 (0:00:01.684) 0:01:39.582 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Sunday 06 July 2025 11:57:44 -0400 (0:00:01.270) 0:01:40.852 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Sunday 06 July 2025 11:57:44 -0400 (0:00:00.254) 0:01:41.106 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.017108", "end": "2025-07-06 11:57:46.091079", "rc": 0, "start": "2025-07-06 11:57:46.073971" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Sunday 06 July 2025 11:57:46 -0400 (0:00:01.793) 0:01:42.900 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Sunday 06 July 2025 11:57:47 -0400 (0:00:00.628) 0:01:43.529 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Sunday 06 July 2025 11:57:48 -0400 (0:00:01.068) 0:01:44.598 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Sunday 06 July 2025 11:57:48 -0400 (0:00:00.449) 0:01:45.048 *********** ok: [managed-node2] => (item=/dev/sda) => { 
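The shared-VG probe above runs vgs --noheadings --binary -o shared foo and gets back 0. A self-contained version of that check is sketched below; the command comes straight from the log, while the assert wording and the vgs_shared register name are illustrative.

- name: Ask LVM whether the VG is marked shared
  command: vgs --noheadings --binary -o shared foo
  register: vgs_shared      # hypothetical register name
  changed_when: false       # query only

- name: A non-shared pool should report 0 (sketch of the verification)
  assert:
    that:
      - vgs_shared.stdout | trim == '0'
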
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Sunday 06 July 2025 11:57:51 -0400 (0:00:02.900) 0:01:47.949 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Sunday 06 July 2025 11:57:52 -0400 (0:00:00.471) 0:01:48.420 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Sunday 06 July 2025 11:57:52 -0400 (0:00:00.395) 0:01:48.816 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Sunday 06 July 2025 11:57:52 -0400 (0:00:00.534) 0:01:49.350 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Sunday 06 July 2025 11:57:53 -0400 (0:00:00.521) 0:01:49.871 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Sunday 06 July 2025 11:57:54 -0400 (0:00:00.536) 0:01:50.408 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Sunday 06 July 2025 11:57:54 -0400 (0:00:00.591) 0:01:50.999 *********** ok: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Sunday 06 July 2025 11:57:55 -0400 (0:00:00.586) 0:01:51.586 *********** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.15.253 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Sunday 06 July 2025 11:57:56 -0400 (0:00:01.555) 0:01:53.141 *********** skipping: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Sunday 06 July 2025 11:57:57 -0400 (0:00:00.544) 0:01:53.686 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Sunday 06 July 2025 11:57:58 -0400 (0:00:01.197) 0:01:54.884 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Sunday 06 July 2025 11:57:58 -0400 (0:00:00.339) 0:01:55.223 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Sunday 06 July 2025 11:57:59 -0400 (0:00:00.461) 0:01:55.685 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Sunday 06 July 2025 11:57:59 -0400 (0:00:00.422) 0:01:56.107 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Sunday 06 July 2025 11:58:00 -0400 (0:00:00.477) 0:01:56.585 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Sunday 06 July 2025 11:58:00 -0400 (0:00:00.603) 0:01:57.189 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Sunday 06 July 2025 11:58:01 -0400 (0:00:00.514) 0:01:57.703 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Sunday 06 July 2025 11:58:01 -0400 (0:00:00.366) 0:01:58.070 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Sunday 06 July 2025 11:58:02 -0400 (0:00:00.456) 0:01:58.526 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Sunday 06 July 2025 11:58:02 -0400 (0:00:00.384) 0:01:58.910 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Sunday 06 July 2025 11:58:02 -0400 (0:00:00.300) 0:01:59.211 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Sunday 06 July 2025 11:58:03 -0400 (0:00:00.336) 0:01:59.547 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Sunday 06 July 2025 11:58:04 -0400 (0:00:01.164) 0:02:00.712 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Sunday 06 July 2025 11:58:05 -0400 (0:00:00.806) 0:02:01.519 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Sunday 06 July 2025 11:58:05 -0400 (0:00:00.459) 0:02:01.979 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Sunday 06 July 2025 11:58:05 -0400 (0:00:00.355) 0:02:02.334 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** 
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Sunday 06 July 2025 11:58:06 -0400 (0:00:00.664) 0:02:02.999 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Sunday 06 July 2025 11:58:06 -0400 (0:00:00.208) 0:02:03.207 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Sunday 06 July 2025 11:58:07 -0400 (0:00:00.368) 0:02:03.575 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Sunday 06 July 2025 11:58:07 -0400 (0:00:00.268) 0:02:03.844 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Sunday 06 July 2025 11:58:07 -0400 (0:00:00.493) 0:02:04.338 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Sunday 06 July 2025 11:58:09 -0400 (0:00:01.123) 0:02:05.461 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Sunday 06 July 2025 11:58:10 -0400 (0:00:01.008) 0:02:06.470 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Sunday 06 July 2025 11:58:10 -0400 (0:00:00.509) 0:02:06.980 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Sunday 06 July 2025 11:58:11 -0400 (0:00:00.522) 0:02:07.503 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Sunday 06 July 2025 11:58:11 -0400 (0:00:00.490) 0:02:07.993 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Sunday 06 July 2025 11:58:12 -0400 (0:00:00.438) 0:02:08.431 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Sunday 06 July 2025 11:58:12 -0400 (0:00:00.787) 0:02:09.219 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Sunday 06 July 2025 11:58:13 -0400 (0:00:00.286) 0:02:09.506 *********** skipping: [managed-node2] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Sunday 06 July 2025 11:58:13 -0400 (0:00:00.594) 0:02:10.101 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Sunday 06 July 2025 11:58:14 -0400 (0:00:00.847) 0:02:10.948 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Sunday 06 July 2025 11:58:15 -0400 (0:00:00.476) 0:02:11.425 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Sunday 06 July 2025 11:58:15 -0400 (0:00:00.553) 0:02:11.978 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Sunday 06 July 2025 11:58:15 -0400 (0:00:00.367) 0:02:12.346 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Sunday 06 July 2025 11:58:16 -0400 (0:00:00.351) 0:02:12.697 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Sunday 06 July 2025 11:58:16 -0400 (0:00:00.460) 0:02:13.158 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Sunday 06 July 2025 11:58:17 -0400 (0:00:00.457) 0:02:13.616 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Sunday 06 July 2025 11:58:17 -0400 (0:00:00.413) 0:02:14.029 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Sunday 06 July 2025 11:58:18 -0400 (0:00:01.139) 0:02:15.168 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Sunday 06 July 2025 11:58:19 -0400 (0:00:00.726) 0:02:15.895 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Sunday 06 July 2025 11:58:19 -0400 (0:00:00.247) 0:02:16.142 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Sunday 06 July 2025 11:58:20 -0400 (0:00:00.392) 0:02:16.535 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Sunday 06 July 2025 11:58:20 -0400 (0:00:00.596) 0:02:17.131 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO 
deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Sunday 06 July 2025 11:58:21 -0400 (0:00:00.293) 0:02:17.425 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Sunday 06 July 2025 11:58:21 -0400 (0:00:00.321) 0:02:17.746 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Sunday 06 July 2025 11:58:21 -0400 (0:00:00.396) 0:02:18.142 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Sunday 06 July 2025 11:58:22 -0400 (0:00:00.336) 0:02:18.479 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Sunday 06 July 2025 11:58:23 -0400 (0:00:01.141) 0:02:19.620 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Sunday 06 July 2025 11:58:23 -0400 (0:00:00.415) 0:02:20.035 *********** skipping: [managed-node2] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Sunday 06 July 2025 11:58:24 -0400 (0:00:00.476) 0:02:20.511 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Sunday 06 July 2025 11:58:24 -0400 (0:00:00.341) 0:02:20.937 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Sunday 06 July 2025 11:58:25 -0400 (0:00:00.520) 0:02:21.458 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Sunday 06 July 2025 11:58:25 -0400 (0:00:00.566) 
0:02:22.024 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Sunday 06 July 2025 11:58:26 -0400 (0:00:00.466) 0:02:22.490 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Sunday 06 July 2025 11:58:26 -0400 (0:00:00.408) 0:02:22.898 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Sunday 06 July 2025 11:58:26 -0400 (0:00:00.481) 0:02:23.380 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Sunday 06 July 2025 11:58:27 -0400 (0:00:00.849) 0:02:24.230 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Sunday 06 July 2025 11:58:28 -0400 (0:00:00.558) 0:02:24.788 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Sunday 06 July 2025 11:58:31 -0400 (0:00:03.350) 0:02:28.139 *********** ok: 
[managed-node2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Sunday 06 July 2025 11:58:32 -0400 (0:00:00.530) 0:02:28.669 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Sunday 06 July 2025 11:58:32 -0400 (0:00:00.593) 0:02:29.263 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Sunday 06 July 2025 11:58:33 -0400 (0:00:00.490) 0:02:29.754 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Sunday 06 July 2025 11:58:33 -0400 (0:00:00.463) 0:02:30.217 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Sunday 06 July 2025 11:58:34 -0400 (0:00:00.420) 0:02:30.638 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Sunday 06 July 2025 11:58:34 -0400 (0:00:00.496) 0:02:31.134 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Sunday 06 July 2025 11:58:35 -0400 (0:00:00.659) 0:02:31.794 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Sunday 06 July 2025 11:58:35 -0400 (0:00:00.397) 0:02:32.192 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Sunday 06 July 2025 11:58:36 -0400 (0:00:00.349) 0:02:32.541 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] 
************************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Sunday 06 July 2025 11:58:36 -0400 (0:00:00.512) 0:02:33.054 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Sunday 06 July 2025 11:58:37 -0400 (0:00:00.453) 0:02:33.508 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Sunday 06 July 2025 11:58:37 -0400 (0:00:00.736) 0:02:34.244 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Sunday 06 July 2025 11:58:38 -0400 (0:00:00.449) 0:02:34.694 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Sunday 06 July 2025 11:58:38 -0400 (0:00:00.409) 0:02:35.103 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Sunday 06 July 2025 11:58:39 -0400 (0:00:00.428) 0:02:35.531 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Sunday 06 July 2025 11:58:39 -0400 (0:00:00.660) 0:02:36.192 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Sunday 06 July 2025 11:58:40 -0400 (0:00:00.374) 0:02:36.566 
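The fstab verification above counts how many /etc/fstab lines reference the device, the mount point, and the mount options, and compares each count with the expected value of 1. A minimal illustrative sketch of that kind of check, assuming a hypothetical __fstab_content variable holding the text of /etc/fstab (the actual test files maintain their own internal facts):

- name: Count fstab lines that reference the test device (illustrative sketch)
  set_fact:
    __test_id_matches: "{{ __fstab_content | regex_findall('/dev/mapper/foo-test1 ') }}"

- name: Assert the device identifier appears exactly once (illustrative sketch)
  assert:
    that:
      - __test_id_matches | length == 1
    msg: "expected exactly one /etc/fstab entry for /dev/mapper/foo-test1"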
*********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Sunday 06 July 2025 11:58:40 -0400 (0:00:00.407) 0:02:36.974 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Sunday 06 July 2025 11:58:40 -0400 (0:00:00.420) 0:02:37.395 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817428.7509246, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1751817428.7509246, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 39930, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1751817428.7509246, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Sunday 06 July 2025 11:58:42 -0400 (0:00:01.360) 0:02:38.756 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Sunday 06 July 2025 11:58:42 -0400 (0:00:00.534) 0:02:39.291 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Sunday 06 July 2025 11:58:43 -0400 (0:00:00.463) 0:02:39.754 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Sunday 06 July 2025 11:58:43 -0400 (0:00:00.507) 0:02:40.262 *********** ok: [managed-node2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Sunday 06 July 2025 11:58:44 -0400 (0:00:00.524) 0:02:40.786 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Sunday 06 July 
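The stat output above is what drives the device-node assertions: the test confirms the expected node exists and (following the /dev/mapper symlink) is a block device. A minimal illustrative sketch of such a check; the register name is hypothetical:

- name: Stat the expected device node (illustrative sketch)
  stat:
    path: /dev/mapper/foo-test1
    follow: true
  register: __device_stat

- name: Assert the device node exists and is a block device (illustrative sketch)
  assert:
    that:
      - __device_stat.stat.exists
      - __device_stat.stat.isblk
    msg: "expected /dev/mapper/foo-test1 to be present as a block device"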
2025 11:58:44 -0400 (0:00:00.443) 0:02:41.230 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Sunday 06 July 2025 11:58:45 -0400 (0:00:00.491) 0:02:41.722 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Sunday 06 July 2025 11:58:45 -0400 (0:00:00.533) 0:02:42.255 *********** changed: [managed-node2] => { "changed": true, "changes": { "installed": [ "cryptsetup" ] }, "rc": 0, "results": [ "Loaded plugins: fastestmirror\nLoading mirror speeds from cached hostfile\n * epel: d2lzkl7pfhq30w.cloudfront.net\n * epel-debuginfo: d2lzkl7pfhq30w.cloudfront.net\n * epel-source: d2lzkl7pfhq30w.cloudfront.net\nResolving Dependencies\n--> Running transaction check\n---> Package cryptsetup.x86_64 0:2.0.3-6.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nInstalling:\n cryptsetup x86_64 2.0.3-6.el7 base 154 k\n\nTransaction Summary\n================================================================================\nInstall 1 Package\n\nTotal download size: 154 k\nInstalled size: 354 k\nDownloading packages:\nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : cryptsetup-2.0.3-6.el7.x86_64 1/1 \n Verifying : cryptsetup-2.0.3-6.el7.x86_64 1/1 \n\nInstalled:\n cryptsetup.x86_64 0:2.0.3-6.el7 \n\nComplete!\n" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Sunday 06 July 2025 11:58:51 -0400 (0:00:05.385) 0:02:47.641 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Sunday 06 July 2025 11:58:51 -0400 (0:00:00.440) 0:02:48.082 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Sunday 06 July 2025 11:58:52 -0400 (0:00:00.477) 0:02:48.559 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Sunday 06 July 2025 11:58:53 -0400 (0:00:00.910) 0:02:49.470 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Sunday 06 July 2025 11:58:53 -0400 (0:00:00.332) 0:02:49.803 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Sunday 06 July 2025 11:58:53 -0400 (0:00:00.461) 0:02:50.264 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Sunday 06 July 2025 11:58:54 -0400 (0:00:00.388) 0:02:50.653 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Sunday 06 July 2025 11:58:54 -0400 (0:00:00.498) 0:02:51.151 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Sunday 06 July 2025 11:58:55 -0400 (0:00:00.539) 0:02:51.690 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Sunday 06 July 2025 11:58:55 -0400 (0:00:00.593) 0:02:52.283 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Sunday 06 July 2025 11:58:56 -0400 (0:00:00.311) 0:02:52.594 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Sunday 06 July 2025 11:58:56 -0400 (0:00:00.412) 0:02:53.007 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Sunday 06 July 2025 11:58:57 -0400 (0:00:00.508) 0:02:53.516 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] 
**************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Sunday 06 July 2025 11:58:57 -0400 (0:00:00.423) 0:02:53.939 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Sunday 06 July 2025 11:58:58 -0400 (0:00:00.530) 0:02:54.470 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Sunday 06 July 2025 11:58:58 -0400 (0:00:00.428) 0:02:54.898 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Sunday 06 July 2025 11:58:58 -0400 (0:00:00.481) 0:02:55.379 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Sunday 06 July 2025 11:58:59 -0400 (0:00:00.538) 0:02:55.918 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Sunday 06 July 2025 11:59:00 -0400 (0:00:00.545) 0:02:56.463 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Sunday 06 July 2025 11:59:00 -0400 (0:00:00.494) 0:02:56.957 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Sunday 06 July 2025 11:59:00 -0400 (0:00:00.354) 0:02:57.312 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Sunday 06 July 2025 11:59:01 -0400 (0:00:00.419) 0:02:57.731 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Sunday 06 July 2025 11:59:01 -0400 (0:00:00.447) 0:02:58.178 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Sunday 06 July 2025 11:59:02 -0400 (0:00:00.447) 0:02:58.626 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Sunday 06 July 2025 11:59:02 -0400 (0:00:00.404) 0:02:59.030 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Sunday 06 July 2025 11:59:05 -0400 (0:00:03.050) 0:03:02.080 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Sunday 06 July 2025 11:59:07 -0400 (0:00:01.789) 0:03:03.870 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Sunday 06 July 2025 11:59:08 -0400 (0:00:00.588) 0:03:04.459 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Sunday 06 July 2025 11:59:08 -0400 (0:00:00.508) 0:03:04.967 *********** ok: [managed-node2] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Sunday 06 July 2025 11:59:10 -0400 (0:00:01.932) 0:03:06.900 *********** skipping: [managed-node2] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Sunday 06 July 2025 11:59:10 -0400 (0:00:00.389) 0:03:07.289 *********** skipping: [managed-node2] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Sunday 06 July 2025 11:59:11 -0400 (0:00:00.493) 0:03:07.782 *********** skipping: [managed-node2] => {} TASK [Calculate the expected size based on pool size and percentage 
value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Sunday 06 July 2025 11:59:11 -0400 (0:00:00.415) 0:03:08.198 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Sunday 06 July 2025 11:59:12 -0400 (0:00:00.566) 0:03:08.764 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Sunday 06 July 2025 11:59:12 -0400 (0:00:00.333) 0:03:09.097 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Sunday 06 July 2025 11:59:13 -0400 (0:00:00.307) 0:03:09.405 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Sunday 06 July 2025 11:59:13 -0400 (0:00:00.463) 0:03:09.868 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Sunday 06 July 2025 11:59:13 -0400 (0:00:00.490) 0:03:10.358 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Sunday 06 July 2025 11:59:14 -0400 (0:00:00.518) 0:03:10.877 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Sunday 06 July 2025 11:59:14 -0400 (0:00:00.418) 0:03:11.295 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Sunday 06 July 2025 11:59:15 -0400 (0:00:00.573) 0:03:11.868 *********** skipping: [managed-node2] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Sunday 06 July 2025 11:59:15 -0400 (0:00:00.440) 0:03:12.309 *********** skipping: [managed-node2] => {} TASK [Show 
test volume size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Sunday 06 July 2025 11:59:16 -0400 (0:00:00.478) 0:03:12.787 *********** skipping: [managed-node2] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Sunday 06 July 2025 11:59:16 -0400 (0:00:00.545) 0:03:13.332 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Sunday 06 July 2025 11:59:17 -0400 (0:00:00.562) 0:03:13.894 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Sunday 06 July 2025 11:59:17 -0400 (0:00:00.435) 0:03:14.330 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Sunday 06 July 2025 11:59:18 -0400 (0:00:00.534) 0:03:14.865 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Sunday 06 July 2025 11:59:18 -0400 (0:00:00.511) 0:03:15.377 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Sunday 06 July 2025 11:59:19 -0400 (0:00:00.414) 0:03:15.791 *********** ok: [managed-node2] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Sunday 06 July 2025 11:59:19 -0400 (0:00:00.442) 0:03:16.234 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Sunday 06 July 2025 11:59:20 -0400 (0:00:00.449) 0:03:16.683 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Sunday 06 July 2025 11:59:20 -0400 
(0:00:00.692) 0:03:17.376 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.019209", "end": "2025-07-06 11:59:22.223078", "rc": 0, "start": "2025-07-06 11:59:22.203869" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Sunday 06 July 2025 11:59:22 -0400 (0:00:01.744) 0:03:19.120 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Sunday 06 July 2025 11:59:23 -0400 (0:00:00.546) 0:03:19.666 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Sunday 06 July 2025 11:59:24 -0400 (0:00:01.050) 0:03:20.717 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Sunday 06 July 2025 11:59:24 -0400 (0:00:00.435) 0:03:21.153 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Sunday 06 July 2025 11:59:25 -0400 (0:00:00.449) 0:03:21.602 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Sunday 06 July 2025 11:59:25 -0400 (0:00:00.479) 0:03:22.081 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Sunday 06 July 2025 11:59:26 -0400 (0:00:00.438) 0:03:22.520 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Sunday 06 July 2025 11:59:26 -0400 (0:00:00.431) 0:03:22.951 *********** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 
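The cache verification above reads the LV's segment type with lvs and then asserts it is linear, since no cache was requested for test1. A minimal illustrative sketch of that check, using a trimmed field list and a hypothetical register name:

- name: Read LV attributes for foo/test1 (illustrative sketch)
  command: >-
    lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
    -o name,segtype foo/test1
  register: __lvs_out
  changed_when: false

- name: Assert the LV is a plain linear LV, i.e. not cached (illustrative sketch)
  assert:
    that:
      - "'LVM2_SEGTYPE=linear' in __lvs_out.stdout"
    msg: "expected foo/test1 to be linear because no cache was configured"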
Sunday 06 July 2025 11:59:26 -0400 (0:00:00.350) 0:03:23.302 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Change the file system signature on the logical volume created above] **** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:46 Sunday 06 July 2025 11:59:27 -0400 (0:00:00.448) 0:03:23.750 *********** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Sunday 06 July 2025 11:59:28 -0400 (0:00:00.895) 0:03:24.646 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Sunday 06 July 2025 11:59:28 -0400 (0:00:00.579) 0:03:25.226 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Sunday 06 July 2025 11:59:29 -0400 (0:00:00.304) 0:03:25.530 *********** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Sunday 06 July 2025 11:59:30 -0400 (0:00:00.928) 0:03:26.459 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Sunday 06 July 2025 11:59:30 -0400 (0:00:00.343) 0:03:26.803 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Sunday 06 July 2025 11:59:30 -0400 (0:00:00.290) 0:03:27.093 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Sunday 06 July 2025 11:59:31 -0400 (0:00:00.350) 0:03:27.444 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Sunday 06 July 2025 11:59:31 -0400 (0:00:00.262) 0:03:27.707 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Sunday 06 July 2025 11:59:32 -0400 (0:00:00.782) 0:03:28.489 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Sunday 06 July 2025 11:59:32 -0400 (0:00:00.406) 0:03:28.896 *********** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "xfs", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Sunday 06 July 2025 11:59:32 -0400 (0:00:00.342) 0:03:29.238 *********** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Sunday 06 July 2025 11:59:33 -0400 (0:00:00.383) 0:03:29.622 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Sunday 06 July 2025 11:59:33 -0400 (0:00:00.392) 0:03:30.014 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Sunday 06 July 2025 11:59:33 -0400 (0:00:00.347) 0:03:30.362 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Sunday 06 July 2025 11:59:34 -0400 
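The storage_pools value printed above is the pool specification the test hands to the role for this step (storage_volumes is empty): one LVM pool named foo on /dev/sda with a single 5g xfs volume mounted at /opt/test1. A minimal illustrative sketch of a play that would apply the same specification; this is not the literal tests_change_fs.yml:

- hosts: managed-node2
  tasks:
    - name: Apply the storage role with the pool shown above (illustrative sketch)
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            volumes:
              - name: test1
                size: 5g
                fs_type: xfs
                mount_point: /opt/test1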
(0:00:00.307) 0:03:30.670 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Sunday 06 July 2025 11:59:34 -0400 (0:00:00.389) 0:03:31.059 *********** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Sunday 06 July 2025 11:59:35 -0400 (0:00:00.605) 0:03:31.665 *********** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Sunday 06 July 2025 11:59:35 -0400 (0:00:00.260) 0:03:31.925 *********** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Sunday 06 July 2025 11:59:41 -0400 (0:00:05.834) 0:03:37.760 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Sunday 06 July 2025 11:59:41 -0400 (0:00:00.480) 0:03:38.241 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817445.2429266, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "3fceedeef6c619b69ada96279531b69ed89734ba", "ctime": 1751817445.2399266, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264045, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751817445.2399266, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1279, "uid": 0, "version": "1023161980", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Sunday 06 July 2025 11:59:43 -0400 (0:00:01.925) 0:03:40.167 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Sunday 06 July 2025 11:59:44 -0400 (0:00:00.392) 0:03:40.560 *********** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Sunday 06 July 2025 11:59:44 -0400 (0:00:00.357) 0:03:40.917 *********** ok: [managed-node2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": 
null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Sunday 06 July 2025 11:59:45 -0400 (0:00:00.619) 0:03:41.537 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Sunday 06 July 2025 11:59:45 -0400 (0:00:00.393) 0:03:41.931 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Sunday 06 July 2025 11:59:45 -0400 (0:00:00.380) 0:03:42.311 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Sunday 06 July 2025 11:59:46 -0400 
(0:00:00.423) 0:03:42.735 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Sunday 06 July 2025 11:59:48 -0400 (0:00:02.147) 0:03:44.882 *********** ok: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Sunday 06 July 2025 11:59:50 -0400 (0:00:01.914) 0:03:46.797 *********** skipping: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Sunday 06 July 2025 11:59:50 -0400 (0:00:00.573) 0:03:47.370 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Sunday 06 July 2025 11:59:52 -0400 (0:00:01.899) 0:03:49.270 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817219.282967, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Sunday 
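The "Set up new/current mounts" task above ensures the filesystem is mounted and the corresponding /etc/fstab line is present; in this run nothing changed because the entry already existed. A minimal illustrative sketch of an equivalent standalone task (short module name as used by Ansible 2.9; newer installs would call ansible.posix.mount):

- name: Ensure the volume is mounted and recorded in /etc/fstab (illustrative sketch)
  mount:
    src: /dev/mapper/foo-test1
    path: /opt/test1
    fstype: xfs
    opts: defaults
    state: mounted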
06 July 2025 11:59:54 -0400 (0:00:01.586) 0:03:50.856 *********** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Sunday 06 July 2025 11:59:54 -0400 (0:00:00.366) 0:03:51.222 *********** ok: [managed-node2] TASK [Verify role results] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:59 Sunday 06 July 2025 11:59:57 -0400 (0:00:02.687) 0:03:53.910 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2 TASK [Print out pool information] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Sunday 06 July 2025 11:59:58 -0400 (0:00:00.794) 0:03:54.704 *********** ok: [managed-node2] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Sunday 06 July 2025 11:59:58 -0400 (0:00:00.471) 0:03:55.176 *********** skipping: [managed-node2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Sunday 06 July 2025 11:59:59 -0400 (0:00:00.423) 0:03:55.599 *********** ok: [managed-node2] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "01cf4af9-d439-40f4-b908-4e9a581e4eed" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "vvI2TT-wUCo-l5pF-mSH6-RUks-12mX-gowx2Y" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Sunday 06 July 2025 12:00:00 -0400 (0:00:01.505) 0:03:57.105 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002999", "end": "2025-07-06 12:00:02.031763", "rc": 0, "start": "2025-07-06 12:00:02.028764" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Sunday 06 July 2025 12:00:02 -0400 (0:00:01.663) 0:03:58.769 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003108", "end": "2025-07-06 12:00:03.680048", "failed_when_result": false, "rc": 0, "start": "2025-07-06 12:00:03.676940" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Sunday 06 July 2025 12:00:04 -0400 (0:00:01.812) 0:04:00.582 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Sunday 06 July 2025 12:00:05 -0400 (0:00:00.880) 0:04:01.462 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Sunday 06 July 2025 12:00:05 -0400 (0:00:00.404) 0:04:01.866 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.018701", "end": "2025-07-06 12:00:06.986258", "rc": 0, "start": "2025-07-06 12:00:06.967557" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Sunday 06 July 2025 12:00:07 -0400 (0:00:01.993) 0:04:03.860 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Sunday 06 July 2025 12:00:08 -0400 (0:00:00.683) 0:04:04.543 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Sunday 06 July 2025 12:00:09 -0400 (0:00:00.873) 0:04:05.417 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Sunday 06 July 2025 12:00:09 -0400 (0:00:00.571) 0:04:05.988 *********** ok: [managed-node2] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Sunday 06 July 2025 12:00:11 -0400 (0:00:01.991) 0:04:07.980 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Sunday 06 July 2025 12:00:12 -0400 (0:00:00.615) 0:04:08.596 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Sunday 06 July 2025 12:00:12 -0400 (0:00:00.540) 0:04:09.136 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Sunday 06 July 2025 12:00:13 -0400 (0:00:00.534) 0:04:09.671 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Sunday 06 July 2025 12:00:13 -0400 (0:00:00.471) 0:04:10.142 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Sunday 06 July 2025 12:00:14 -0400 (0:00:00.643) 0:04:10.785 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Sunday 06 July 2025 12:00:15 -0400 (0:00:00.644) 0:04:11.430 *********** ok: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Sunday 06 July 2025 12:00:15 -0400 (0:00:00.939) 0:04:12.369 *********** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.15.253 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Sunday 06 July 2025 12:00:17 -0400 (0:00:01.879) 0:04:14.249 *********** skipping: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Sunday 06 July 2025 12:00:18 -0400 (0:00:00.402) 0:04:14.651 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Sunday 06 July 2025 12:00:19 -0400 (0:00:00.835) 0:04:15.486 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Sunday 06 July 2025 12:00:19 -0400 (0:00:00.403) 0:04:15.890 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Sunday 06 July 2025 12:00:19 -0400 (0:00:00.470) 0:04:16.360 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Sunday 06 July 2025 12:00:20 -0400 (0:00:00.449) 0:04:16.809 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Sunday 06 July 2025 12:00:20 -0400 (0:00:00.445) 0:04:17.254 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Sunday 06 July 2025 12:00:21 -0400 (0:00:00.380) 0:04:17.635 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Sunday 06 July 2025 12:00:21 -0400 (0:00:00.590) 0:04:18.226 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Sunday 06 July 2025 12:00:22 -0400 (0:00:00.695) 0:04:18.921 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Sunday 06 July 2025 12:00:22 -0400 (0:00:00.385) 0:04:19.307 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Sunday 06 July 2025 12:00:23 -0400 (0:00:00.498) 0:04:19.805 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Sunday 06 July 2025 12:00:23 -0400 (0:00:00.406) 0:04:20.212 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Sunday 06 July 2025 12:00:24 -0400 (0:00:00.313) 0:04:20.525 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Sunday 06 July 2025 12:00:25 -0400 (0:00:00.990) 0:04:21.515 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Sunday 06 July 2025 12:00:26 -0400 (0:00:00.941) 0:04:22.457 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Sunday 06 July 2025 12:00:26 -0400 (0:00:00.503) 0:04:22.960 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Sunday 06 July 2025 12:00:26 -0400 (0:00:00.378) 0:04:23.339 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** 
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Sunday 06 July 2025 12:00:27 -0400 (0:00:00.434) 0:04:23.773 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Sunday 06 July 2025 12:00:27 -0400 (0:00:00.453) 0:04:24.227 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Sunday 06 July 2025 12:00:28 -0400 (0:00:00.417) 0:04:24.645 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Sunday 06 July 2025 12:00:28 -0400 (0:00:00.414) 0:04:25.059 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Sunday 06 July 2025 12:00:29 -0400 (0:00:00.406) 0:04:25.465 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Sunday 06 July 2025 12:00:30 -0400 (0:00:01.010) 0:04:26.476 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Sunday 06 July 2025 12:00:30 -0400 (0:00:00.813) 0:04:27.289 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Sunday 06 July 2025 12:00:31 -0400 (0:00:00.353) 0:04:27.643 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Sunday 06 July 2025 12:00:31 -0400 (0:00:00.419) 0:04:28.062 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Sunday 06 July 2025 12:00:32 -0400 (0:00:00.429) 0:04:28.492 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Sunday 06 July 2025 12:00:32 -0400 (0:00:00.571) 0:04:29.064 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Sunday 06 July 2025 12:00:33 -0400 (0:00:00.957) 0:04:30.021 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Sunday 06 July 2025 12:00:34 -0400 (0:00:00.401) 0:04:30.423 *********** skipping: [managed-node2] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Sunday 06 July 2025 12:00:34 -0400 (0:00:00.572) 0:04:30.995 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Sunday 06 July 2025 12:00:35 -0400 (0:00:00.793) 0:04:31.788 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Sunday 06 July 2025 12:00:35 -0400 (0:00:00.391) 0:04:32.180 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Sunday 06 July 2025 12:00:36 -0400 (0:00:00.619) 0:04:32.800 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Sunday 06 July 2025 12:00:36 -0400 (0:00:00.457) 0:04:33.257 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Sunday 06 July 2025 12:00:37 -0400 (0:00:00.487) 0:04:33.745 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Sunday 06 July 2025 12:00:37 -0400 (0:00:00.359) 0:04:34.104 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Sunday 06 July 2025 12:00:38 -0400 (0:00:00.373) 0:04:34.478 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Sunday 06 July 2025 12:00:38 -0400 (0:00:00.402) 0:04:34.881 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Sunday 06 July 2025 12:00:39 -0400 (0:00:01.325) 0:04:36.207 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Sunday 06 July 2025 12:00:40 -0400 (0:00:01.028) 0:04:37.235 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Sunday 06 July 2025 12:00:41 -0400 (0:00:00.532) 0:04:37.767 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Sunday 06 July 2025 12:00:41 -0400 (0:00:00.391) 0:04:38.159 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Sunday 06 July 2025 12:00:42 -0400 (0:00:00.320) 0:04:38.480 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO 
deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Sunday 06 July 2025 12:00:42 -0400 (0:00:00.442) 0:04:38.922 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Sunday 06 July 2025 12:00:42 -0400 (0:00:00.459) 0:04:39.381 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Sunday 06 July 2025 12:00:43 -0400 (0:00:00.611) 0:04:39.993 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Sunday 06 July 2025 12:00:44 -0400 (0:00:00.425) 0:04:40.418 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Sunday 06 July 2025 12:00:45 -0400 (0:00:01.199) 0:04:41.618 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Sunday 06 July 2025 12:00:45 -0400 (0:00:00.463) 0:04:42.081 *********** skipping: [managed-node2] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Sunday 06 July 2025 12:00:46 -0400 (0:00:00.372) 0:04:42.454 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Sunday 06 July 2025 12:00:46 -0400 (0:00:00.467) 0:04:42.921 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Sunday 06 July 2025 12:00:46 -0400 (0:00:00.401) 0:04:43.323 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Sunday 06 July 2025 12:00:47 -0400 (0:00:00.506) 
0:04:43.830 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Sunday 06 July 2025 12:00:47 -0400 (0:00:00.549) 0:04:44.380 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Sunday 06 July 2025 12:00:48 -0400 (0:00:00.419) 0:04:44.799 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Sunday 06 July 2025 12:00:48 -0400 (0:00:00.462) 0:04:45.261 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Sunday 06 July 2025 12:00:49 -0400 (0:00:00.813) 0:04:46.075 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Sunday 06 July 2025 12:00:50 -0400 (0:00:00.670) 0:04:46.745 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Sunday 06 July 2025 12:00:52 -0400 (0:00:01.939) 0:04:48.684 *********** ok: 
[managed-node2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Sunday 06 July 2025 12:00:52 -0400 (0:00:00.373) 0:04:49.058 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Sunday 06 July 2025 12:00:53 -0400 (0:00:00.467) 0:04:49.526 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Sunday 06 July 2025 12:00:53 -0400 (0:00:00.566) 0:04:50.092 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Sunday 06 July 2025 12:00:54 -0400 (0:00:00.365) 0:04:50.458 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Sunday 06 July 2025 12:00:54 -0400 (0:00:00.495) 0:04:50.953 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Sunday 06 July 2025 12:00:54 -0400 (0:00:00.276) 0:04:51.229 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Sunday 06 July 2025 12:00:55 -0400 (0:00:00.440) 0:04:51.670 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Sunday 06 July 2025 12:00:55 -0400 (0:00:00.486) 0:04:52.156 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Sunday 06 July 2025 12:00:56 -0400 (0:00:00.355) 0:04:52.512 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] 
************************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Sunday 06 July 2025 12:00:56 -0400 (0:00:00.365) 0:04:52.878 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Sunday 06 July 2025 12:00:57 -0400 (0:00:00.568) 0:04:53.449 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Sunday 06 July 2025 12:00:57 -0400 (0:00:00.621) 0:04:54.070 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Sunday 06 July 2025 12:00:58 -0400 (0:00:00.564) 0:04:54.634 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Sunday 06 July 2025 12:00:58 -0400 (0:00:00.523) 0:04:55.157 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Sunday 06 July 2025 12:00:59 -0400 (0:00:00.540) 0:04:55.698 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Sunday 06 July 2025 12:00:59 -0400 (0:00:00.573) 0:04:56.272 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Sunday 06 July 2025 12:01:00 -0400 (0:00:00.369) 0:04:56.642 
*********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Sunday 06 July 2025 12:01:00 -0400 (0:00:00.523) 0:04:57.166 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Sunday 06 July 2025 12:01:01 -0400 (0:00:00.594) 0:04:57.761 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817428.7509246, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1751817428.7509246, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 39930, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1751817428.7509246, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Sunday 06 July 2025 12:01:03 -0400 (0:00:01.973) 0:04:59.734 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Sunday 06 July 2025 12:01:03 -0400 (0:00:00.597) 0:05:00.331 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Sunday 06 July 2025 12:01:04 -0400 (0:00:00.599) 0:05:00.931 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Sunday 06 July 2025 12:01:04 -0400 (0:00:00.384) 0:05:01.315 *********** ok: [managed-node2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Sunday 06 July 2025 12:01:05 -0400 (0:00:00.487) 0:05:01.803 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Sunday 06 July 
2025 12:01:05 -0400 (0:00:00.515) 0:05:02.377 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Sunday 06 July 2025 12:01:06 -0400 (0:00:00.450) 0:05:02.828 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Sunday 06 July 2025 12:01:06 -0400 (0:00:00.485) 0:05:03.314 *********** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Sunday 06 July 2025 12:01:10 -0400 (0:00:03.198) 0:05:06.512 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Sunday 06 July 2025 12:01:10 -0400 (0:00:00.523) 0:05:07.036 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Sunday 06 July 2025 12:01:11 -0400 (0:00:00.446) 0:05:07.483 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Sunday 06 July 2025 12:01:11 -0400 (0:00:00.596) 0:05:08.080 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Sunday 06 July 2025 12:01:12 -0400 (0:00:00.343) 0:05:08.423 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Sunday 06 July 2025 12:01:12 -0400 (0:00:00.384) 0:05:08.808 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Sunday 06 July 2025 12:01:12 -0400 (0:00:00.410) 0:05:09.218 *********** skipping: [managed-node2] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Sunday 06 July 2025 12:01:13 -0400 (0:00:00.506) 0:05:09.725 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Sunday 06 July 2025 12:01:13 -0400 (0:00:00.447) 0:05:10.172 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Sunday 06 July 2025 12:01:14 -0400 (0:00:00.599) 0:05:10.772 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Sunday 06 July 2025 12:01:14 -0400 (0:00:00.582) 0:05:11.355 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Sunday 06 July 2025 12:01:15 -0400 (0:00:00.399) 0:05:11.754 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Sunday 06 July 2025 12:01:15 -0400 (0:00:00.518) 0:05:12.272 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Sunday 06 July 2025 12:01:16 -0400 (0:00:00.494) 0:05:12.767 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Sunday 06 July 2025 12:01:16 -0400 (0:00:00.495) 0:05:13.262 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Sunday 06 July 2025 12:01:17 -0400 (0:00:00.458) 0:05:13.721 *********** skipping: 
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Sunday 06 July 2025 12:01:17 -0400 (0:00:00.374) 0:05:14.095 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Sunday 06 July 2025 12:01:18 -0400 (0:00:00.625) 0:05:14.721 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Sunday 06 July 2025 12:01:18 -0400 (0:00:00.356) 0:05:15.078 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Sunday 06 July 2025 12:01:19 -0400 (0:00:00.562) 0:05:15.640 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Sunday 06 July 2025 12:01:19 -0400 (0:00:00.365) 0:05:16.006 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Sunday 06 July 2025 12:01:20 -0400 (0:00:00.562) 0:05:16.568 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Sunday 06 July 2025 12:01:20 -0400 (0:00:00.470) 0:05:17.039 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Sunday 06 July 2025 12:01:21 -0400 (0:00:00.459) 0:05:17.498 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Sunday 06 July 2025 12:01:21 -0400 (0:00:00.363) 0:05:17.861 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Sunday 06 July 2025 12:01:23 -0400 (0:00:01.944) 0:05:19.806 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Sunday 06 July 2025 12:01:25 -0400 (0:00:01.956) 0:05:21.763 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Sunday 06 July 2025 12:01:25 -0400 (0:00:00.502) 0:05:22.266 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Sunday 06 July 2025 12:01:26 -0400 (0:00:00.507) 0:05:22.773 *********** ok: [managed-node2] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Sunday 06 July 2025 12:01:28 -0400 (0:00:01.935) 0:05:24.708 *********** skipping: [managed-node2] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Sunday 06 July 2025 12:01:28 -0400 (0:00:00.534) 0:05:25.243 *********** skipping: [managed-node2] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Sunday 06 July 2025 12:01:29 -0400 (0:00:00.589) 0:05:25.832 *********** skipping: [managed-node2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Sunday 06 July 2025 12:01:29 -0400 (0:00:00.411) 0:05:26.244 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Sunday 06 July 2025 12:01:30 -0400 (0:00:00.475) 0:05:26.719 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Sunday 06 July 2025 12:01:30 -0400 (0:00:00.423) 0:05:27.142 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** 
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Sunday 06 July 2025 12:01:31 -0400 (0:00:00.438) 0:05:27.582 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Sunday 06 July 2025 12:01:31 -0400 (0:00:00.267) 0:05:27.849 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Sunday 06 July 2025 12:01:31 -0400 (0:00:00.536) 0:05:28.386 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Sunday 06 July 2025 12:01:32 -0400 (0:00:00.454) 0:05:28.840 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Sunday 06 July 2025 12:01:32 -0400 (0:00:00.515) 0:05:29.355 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Sunday 06 July 2025 12:01:33 -0400 (0:00:00.333) 0:05:29.689 *********** skipping: [managed-node2] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Sunday 06 July 2025 12:01:33 -0400 (0:00:00.341) 0:05:30.030 *********** skipping: [managed-node2] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Sunday 06 July 2025 12:01:34 -0400 (0:00:00.469) 0:05:30.500 *********** skipping: [managed-node2] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Sunday 06 July 2025 12:01:34 -0400 (0:00:00.344) 0:05:30.844 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Sunday 06 July 2025 12:01:34 -0400 (0:00:00.530) 0:05:31.375 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task 
path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Sunday 06 July 2025 12:01:35 -0400 (0:00:00.619) 0:05:31.994 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Sunday 06 July 2025 12:01:36 -0400 (0:00:00.583) 0:05:32.577 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Sunday 06 July 2025 12:01:36 -0400 (0:00:00.457) 0:05:33.034 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Sunday 06 July 2025 12:01:37 -0400 (0:00:00.525) 0:05:33.560 *********** ok: [managed-node2] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Sunday 06 July 2025 12:01:37 -0400 (0:00:00.527) 0:05:34.087 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Sunday 06 July 2025 12:01:38 -0400 (0:00:00.444) 0:05:34.532 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Sunday 06 July 2025 12:01:39 -0400 (0:00:01.191) 0:05:35.723 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.021261", "end": "2025-07-06 12:01:40.776154", "rc": 0, "start": "2025-07-06 12:01:40.754893" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Sunday 06 July 2025 12:01:41 -0400 (0:00:01.998) 0:05:37.722 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Sunday 06 July 2025 12:01:41 -0400 (0:00:00.476) 0:05:38.198 *********** ok: 
[managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Sunday 06 July 2025 12:01:42 -0400 (0:00:00.567) 0:05:38.766 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Sunday 06 July 2025 12:01:42 -0400 (0:00:00.539) 0:05:39.305 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Sunday 06 July 2025 12:01:43 -0400 (0:00:00.446) 0:05:39.752 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Sunday 06 July 2025 12:01:43 -0400 (0:00:00.497) 0:05:40.249 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Sunday 06 July 2025 12:01:44 -0400 (0:00:00.455) 0:05:40.705 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Sunday 06 July 2025 12:01:44 -0400 (0:00:00.370) 0:05:41.076 *********** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Sunday 06 July 2025 12:01:45 -0400 (0:00:00.436) 0:05:41.513 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Re-run the role on the same volume without specifying fs_type] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:62 Sunday 06 July 2025 12:01:45 -0400 (0:00:00.446) 0:05:41.960 *********** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Sunday 06 July 2025 12:01:46 -0400 (0:00:01.308) 0:05:43.268 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Sunday 06 July 2025 12:01:47 -0400 (0:00:00.738) 
0:05:44.006 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Sunday 06 July 2025 12:01:48 -0400 (0:00:00.575) 0:05:44.582 *********** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Sunday 06 July 2025 12:01:49 -0400 (0:00:00.853) 0:05:45.435 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Sunday 06 July 2025 12:01:49 -0400 (0:00:00.399) 0:05:45.834 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Sunday 06 July 2025 12:01:49 -0400 (0:00:00.364) 0:05:46.198 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Sunday 06 July 2025 12:01:50 -0400 (0:00:00.486) 0:05:46.685 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Sunday 06 July 2025 12:01:50 -0400 (0:00:00.489) 0:05:47.175 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Sunday 06 July 2025 12:01:52 -0400 (0:00:01.242) 0:05:48.417 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Sunday 06 July 2025 12:01:52 -0400 (0:00:00.445) 0:05:48.863 *********** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Sunday 06 July 2025 12:01:52 -0400 (0:00:00.497) 0:05:49.361 *********** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Sunday 06 July 2025 12:01:53 -0400 (0:00:00.394) 0:05:49.755 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Sunday 06 July 2025 12:01:53 -0400 (0:00:00.483) 0:05:50.238 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Sunday 06 July 2025 12:01:54 -0400 (0:00:00.561) 0:05:50.800 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Sunday 06 July 2025 12:01:54 -0400 (0:00:00.581) 0:05:51.381 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Sunday 06 July 2025 12:01:55 -0400 (0:00:00.324) 0:05:51.706 *********** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Sunday 06 July 2025 12:01:55 -0400 (0:00:00.586) 0:05:52.292 *********** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Sunday 06 July 2025 12:01:56 -0400 (0:00:00.478) 0:05:52.771 *********** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": [], "leaves": [ 
"/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Sunday 06 July 2025 12:02:02 -0400 (0:00:05.660) 0:05:58.432 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Sunday 06 July 2025 12:02:02 -0400 (0:00:00.563) 0:05:58.995 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817445.2429266, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "3fceedeef6c619b69ada96279531b69ed89734ba", "ctime": 1751817445.2399266, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264045, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751817445.2399266, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1279, "uid": 0, "version": "1023161980", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK 
[fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Sunday 06 July 2025 12:02:04 -0400 (0:00:01.896) 0:06:00.891 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Sunday 06 July 2025 12:02:04 -0400 (0:00:00.364) 0:06:01.255 *********** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Sunday 06 July 2025 12:02:05 -0400 (0:00:00.377) 0:06:01.633 *********** ok: [managed-node2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Sunday 06 July 2025 12:02:05 -0400 (0:00:00.609) 0:06:02.242 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Sunday 06 July 2025 12:02:06 -0400 (0:00:00.445) 0:06:02.688 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Sunday 06 July 2025 12:02:06 -0400 (0:00:00.429) 0:06:03.117 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Sunday 06 July 2025 12:02:07 -0400 (0:00:00.357) 0:06:03.475 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Sunday 06 July 2025 12:02:08 -0400 (0:00:01.648) 0:06:05.124 *********** ok: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Sunday 06 July 2025 12:02:10 -0400 (0:00:01.687) 0:06:06.811 *********** skipping: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Sunday 06 July 2025 12:02:10 -0400 (0:00:00.469) 0:06:07.280 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Sunday 06 July 2025 12:02:12 -0400 (0:00:01.659) 0:06:08.939 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817219.282967, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Sunday 06 July 2025 12:02:14 -0400 (0:00:01.485) 0:06:10.425 *********** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Sunday 06 July 2025 12:02:14 -0400 (0:00:00.366) 0:06:10.792 *********** ok: [managed-node2] TASK [Verify the output of the duplicate volumes test] ************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:74 Sunday 06 July 2025 12:02:17 -0400 (0:00:03.115) 0:06:13.908 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify role results] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:81 Sunday 06 July 2025 12:02:18 -0400 (0:00:00.591) 0:06:14.499 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for 
managed-node2 TASK [Print out pool information] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Sunday 06 July 2025 12:02:18 -0400 (0:00:00.681) 0:06:15.180 *********** ok: [managed-node2] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Sunday 06 July 2025 12:02:19 -0400 (0:00:00.434) 0:06:15.614 *********** skipping: [managed-node2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Sunday 06 July 2025 12:02:19 -0400 (0:00:00.375) 0:06:15.990 *********** ok: [managed-node2] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "01cf4af9-d439-40f4-b908-4e9a581e4eed" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "vvI2TT-wUCo-l5pF-mSH6-RUks-12mX-gowx2Y" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Sunday 06 July 2025 12:02:21 -0400 (0:00:01.890) 0:06:17.881 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002700", "end": "2025-07-06 12:02:22.684337", "rc": 0, "start": "2025-07-06 12:02:22.681637" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Sunday 06 July 2025 12:02:23 -0400 (0:00:01.659) 0:06:19.541 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002562", "end": "2025-07-06 12:02:24.439848", "failed_when_result": false, "rc": 0, "start": "2025-07-06 12:02:24.437286" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Sunday 06 July 2025 12:02:25 -0400 (0:00:01.902) 0:06:21.443 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Sunday 06 July 2025 12:02:25 -0400 (0:00:00.803) 0:06:22.247 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Sunday 06 July 2025 12:02:26 -0400 (0:00:00.488) 0:06:22.736 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.017264", "end": "2025-07-06 12:02:27.760082", "rc": 0, "start": "2025-07-06 12:02:27.742818" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Sunday 06 July 2025 12:02:28 -0400 (0:00:01.931) 0:06:24.668 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Sunday 06 July 2025 12:02:28 -0400 (0:00:00.671) 0:06:25.339 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Sunday 06 July 2025 12:02:29 -0400 (0:00:00.799) 0:06:26.139 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Sunday 06 July 2025 12:02:30 -0400 (0:00:00.571) 0:06:26.710 *********** ok: [managed-node2] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Sunday 06 July 2025 12:02:32 -0400 (0:00:01.820) 0:06:28.530 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Sunday 06 July 2025 12:02:32 -0400 (0:00:00.384) 0:06:28.914 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Sunday 06 July 2025 12:02:33 -0400 (0:00:00.559) 0:06:29.473 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Sunday 06 July 2025 12:02:33 -0400 (0:00:00.462) 0:06:29.935 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Sunday 06 July 2025 12:02:34 -0400 (0:00:00.565) 0:06:30.501 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Sunday 06 July 2025 12:02:34 -0400 (0:00:00.429) 0:06:30.930 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Sunday 06 July 2025 12:02:34 -0400 (0:00:00.425) 0:06:31.356 *********** ok: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Sunday 06 July 2025 12:02:35 -0400 (0:00:00.810) 0:06:32.166 *********** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.15.253 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Sunday 06 July 2025 12:02:37 -0400 (0:00:01.721) 0:06:33.888 *********** skipping: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Sunday 06 July 2025 12:02:37 -0400 (0:00:00.440) 0:06:34.329 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Sunday 06 July 2025 12:02:39 -0400 (0:00:01.474) 0:06:35.803 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Sunday 06 July 2025 12:02:39 -0400 (0:00:00.535) 0:06:36.338 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Sunday 06 July 2025 12:02:40 -0400 (0:00:00.493) 0:06:36.832 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Sunday 06 July 2025 12:02:40 -0400 (0:00:00.496) 0:06:37.328 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Sunday 06 July 2025 12:02:41 -0400 (0:00:00.543) 0:06:37.871 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Sunday 06 July 2025 12:02:41 -0400 (0:00:00.438) 0:06:38.309 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Sunday 06 July 2025 12:02:42 -0400 (0:00:00.478) 0:06:38.788 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Sunday 06 July 2025 12:02:42 -0400 (0:00:00.583) 0:06:39.371 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Sunday 06 July 2025 12:02:43 -0400 (0:00:00.441) 0:06:39.812 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Sunday 06 July 2025 12:02:43 -0400 (0:00:00.407) 0:06:40.220 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Sunday 06 July 2025 12:02:44 -0400 (0:00:00.436) 0:06:40.657 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Sunday 06 July 2025 12:02:44 -0400 (0:00:00.373) 0:06:41.030 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Sunday 06 July 2025 12:02:45 -0400 (0:00:00.689) 0:06:41.719 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Sunday 06 July 2025 12:02:46 -0400 (0:00:00.766) 0:06:42.485 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Sunday 06 July 2025 12:02:46 -0400 (0:00:00.597) 0:06:43.083 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Sunday 06 July 2025 12:02:47 -0400 (0:00:00.340) 0:06:43.424 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** 
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Sunday 06 July 2025 12:02:47 -0400 (0:00:00.429) 0:06:43.853 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Sunday 06 July 2025 12:02:47 -0400 (0:00:00.518) 0:06:44.371 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Sunday 06 July 2025 12:02:48 -0400 (0:00:00.489) 0:06:44.861 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Sunday 06 July 2025 12:02:48 -0400 (0:00:00.496) 0:06:45.358 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Sunday 06 July 2025 12:02:49 -0400 (0:00:00.406) 0:06:45.765 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Sunday 06 July 2025 12:02:50 -0400 (0:00:00.804) 0:06:46.569 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Sunday 06 July 2025 12:02:50 -0400 (0:00:00.820) 0:06:47.390 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Sunday 06 July 2025 12:02:51 -0400 (0:00:00.421) 0:06:47.811 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Sunday 06 July 2025 12:02:51 -0400 (0:00:00.273) 0:06:48.084 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Sunday 06 July 2025 12:02:52 -0400 (0:00:00.518) 0:06:48.603 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Sunday 06 July 2025 12:02:52 -0400 (0:00:00.533) 0:06:49.136 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Sunday 06 July 2025 12:02:53 -0400 (0:00:01.096) 0:06:50.233 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Sunday 06 July 2025 12:02:54 -0400 (0:00:00.436) 0:06:50.669 *********** skipping: [managed-node2] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Sunday 06 July 2025 12:02:54 -0400 (0:00:00.660) 0:06:51.329 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Sunday 06 July 2025 12:02:55 -0400 (0:00:00.804) 0:06:52.134 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Sunday 06 July 2025 12:02:56 -0400 (0:00:00.552) 0:06:52.686 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Sunday 06 July 2025 12:02:56 -0400 (0:00:00.641) 0:06:53.328 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Sunday 06 July 2025 12:02:57 -0400 (0:00:00.581) 0:06:53.909 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Sunday 06 July 2025 12:02:57 -0400 (0:00:00.401) 0:06:54.311 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Sunday 06 July 2025 12:02:58 -0400 (0:00:00.582) 0:06:54.893 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Sunday 06 July 2025 12:02:58 -0400 (0:00:00.356) 0:06:55.250 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Sunday 06 July 2025 12:02:59 -0400 (0:00:00.376) 0:06:55.627 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Sunday 06 July 2025 12:02:59 -0400 (0:00:00.642) 0:06:56.270 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Sunday 06 July 2025 12:03:00 -0400 (0:00:00.880) 0:06:57.150 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Sunday 06 July 2025 12:03:01 -0400 (0:00:00.902) 0:06:58.052 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Sunday 06 July 2025 12:03:01 -0400 (0:00:00.321) 0:06:58.374 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Sunday 06 July 2025 12:03:02 -0400 (0:00:00.362) 0:06:58.736 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO 
deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Sunday 06 July 2025 12:03:02 -0400 (0:00:00.419) 0:06:59.155 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Sunday 06 July 2025 12:03:03 -0400 (0:00:00.310) 0:06:59.466 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Sunday 06 July 2025 12:03:03 -0400 (0:00:00.480) 0:06:59.946 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Sunday 06 July 2025 12:03:03 -0400 (0:00:00.389) 0:07:00.336 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Sunday 06 July 2025 12:03:05 -0400 (0:00:01.165) 0:07:01.501 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Sunday 06 July 2025 12:03:05 -0400 (0:00:00.310) 0:07:01.812 *********** skipping: [managed-node2] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Sunday 06 July 2025 12:03:05 -0400 (0:00:00.286) 0:07:02.098 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Sunday 06 July 2025 12:03:06 -0400 (0:00:00.483) 0:07:02.582 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Sunday 06 July 2025 12:03:06 -0400 (0:00:00.519) 0:07:03.101 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Sunday 06 July 2025 12:03:07 -0400 (0:00:00.370) 
0:07:03.472 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Sunday 06 July 2025 12:03:07 -0400 (0:00:00.550) 0:07:04.023 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Sunday 06 July 2025 12:03:08 -0400 (0:00:00.500) 0:07:04.523 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Sunday 06 July 2025 12:03:08 -0400 (0:00:00.390) 0:07:04.913 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Sunday 06 July 2025 12:03:09 -0400 (0:00:00.971) 0:07:05.885 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Sunday 06 July 2025 12:03:09 -0400 (0:00:00.499) 0:07:06.384 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Sunday 06 July 2025 12:03:11 -0400 (0:00:02.009) 0:07:08.393 *********** ok: 
[managed-node2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Sunday 06 July 2025 12:03:12 -0400 (0:00:00.579) 0:07:08.972 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Sunday 06 July 2025 12:03:13 -0400 (0:00:00.567) 0:07:09.539 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Sunday 06 July 2025 12:03:13 -0400 (0:00:00.488) 0:07:10.028 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Sunday 06 July 2025 12:03:14 -0400 (0:00:00.467) 0:07:10.496 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Sunday 06 July 2025 12:03:14 -0400 (0:00:00.678) 0:07:11.174 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Sunday 06 July 2025 12:03:15 -0400 (0:00:00.648) 0:07:11.823 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Sunday 06 July 2025 12:03:15 -0400 (0:00:00.564) 0:07:12.388 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Sunday 06 July 2025 12:03:16 -0400 (0:00:00.610) 0:07:12.998 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Sunday 06 July 2025 12:03:17 -0400 (0:00:00.427) 0:07:13.426 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] 
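
The mount verification above asserts that the logical volume /dev/mapper/foo-test1 is mounted on /opt/test1 and that no swap entries match it. A minimal sketch of that kind of check, assuming gathered mount facts (ansible_facts.mounts); the task name and filter chain below are illustrative, not the test's own tasks:

    - name: Verify the current mount state by device (sketch)
      ansible.builtin.assert:
        that:
          # exactly one mount of the LV on the expected mount point
          - >-
            ansible_facts.mounts
            | selectattr('device', 'equalto', '/dev/mapper/foo-test1')
            | selectattr('mount', 'equalto', '/opt/test1')
            | list | length == 1
        msg: "/dev/mapper/foo-test1 is not mounted on /opt/test1"
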
************************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Sunday 06 July 2025 12:03:17 -0400 (0:00:00.478) 0:07:13.905 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Sunday 06 July 2025 12:03:17 -0400 (0:00:00.401) 0:07:14.307 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Sunday 06 July 2025 12:03:18 -0400 (0:00:00.741) 0:07:15.048 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Sunday 06 July 2025 12:03:19 -0400 (0:00:00.589) 0:07:15.638 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Sunday 06 July 2025 12:03:19 -0400 (0:00:00.480) 0:07:16.119 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Sunday 06 July 2025 12:03:20 -0400 (0:00:00.502) 0:07:16.621 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Sunday 06 July 2025 12:03:20 -0400 (0:00:00.613) 0:07:17.235 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Sunday 06 July 2025 12:03:21 -0400 (0:00:00.394) 0:07:17.629 
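
The fstab checks above count the lines of /etc/fstab that match the device, the mount point, and the mount options, and expect exactly one of each. A minimal sketch of an equivalent check; the register name and regex patterns are illustrative assumptions, not the test's own helpers:

    - name: Read /etc/fstab (sketch)
      ansible.builtin.command: cat /etc/fstab
      register: __fstab_check
      changed_when: false

    - name: Expect exactly one entry for the volume (sketch)
      ansible.builtin.assert:
        that:
          # one line whose source is the LV, mounted on /opt/test1 as xfs with default options
          - __fstab_check.stdout_lines | select('search', '^/dev/mapper/foo-test1 ') | list | length == 1
          - __fstab_check.stdout_lines | select('search', ' /opt/test1 xfs defaults ') | list | length == 1
        msg: "expected exactly one /etc/fstab entry for /dev/mapper/foo-test1 on /opt/test1"
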
*********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Sunday 06 July 2025 12:03:21 -0400 (0:00:00.673) 0:07:18.303 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Sunday 06 July 2025 12:03:22 -0400 (0:00:00.601) 0:07:18.904 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817428.7509246, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1751817428.7509246, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 39930, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1751817428.7509246, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Sunday 06 July 2025 12:03:24 -0400 (0:00:01.929) 0:07:20.833 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Sunday 06 July 2025 12:03:25 -0400 (0:00:00.617) 0:07:21.451 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Sunday 06 July 2025 12:03:25 -0400 (0:00:00.440) 0:07:21.891 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Sunday 06 July 2025 12:03:25 -0400 (0:00:00.481) 0:07:22.373 *********** ok: [managed-node2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Sunday 06 July 2025 12:03:27 -0400 (0:00:01.307) 0:07:23.680 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Sunday 06 July 
2025 12:03:27 -0400 (0:00:00.475) 0:07:24.156 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Sunday 06 July 2025 12:03:28 -0400 (0:00:00.475) 0:07:24.631 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Sunday 06 July 2025 12:03:28 -0400 (0:00:00.523) 0:07:25.154 *********** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Sunday 06 July 2025 12:03:31 -0400 (0:00:02.614) 0:07:27.769 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Sunday 06 July 2025 12:03:31 -0400 (0:00:00.440) 0:07:28.210 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Sunday 06 July 2025 12:03:32 -0400 (0:00:00.283) 0:07:28.493 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Sunday 06 July 2025 12:03:32 -0400 (0:00:00.526) 0:07:29.020 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Sunday 06 July 2025 12:03:33 -0400 (0:00:00.433) 0:07:29.454 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Sunday 06 July 2025 12:03:33 -0400 (0:00:00.466) 0:07:29.920 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Sunday 06 July 2025 12:03:33 -0400 (0:00:00.418) 0:07:30.339 *********** skipping: [managed-node2] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Sunday 06 July 2025 12:03:34 -0400 (0:00:00.394) 0:07:30.733 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Sunday 06 July 2025 12:03:34 -0400 (0:00:00.397) 0:07:31.131 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Sunday 06 July 2025 12:03:35 -0400 (0:00:00.590) 0:07:31.721 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Sunday 06 July 2025 12:03:35 -0400 (0:00:00.327) 0:07:32.048 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Sunday 06 July 2025 12:03:36 -0400 (0:00:00.470) 0:07:32.519 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Sunday 06 July 2025 12:03:36 -0400 (0:00:00.382) 0:07:32.902 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Sunday 06 July 2025 12:03:36 -0400 (0:00:00.474) 0:07:33.377 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Sunday 06 July 2025 12:03:37 -0400 (0:00:00.530) 0:07:33.907 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Sunday 06 July 2025 12:03:37 -0400 (0:00:00.362) 0:07:34.270 *********** skipping: 
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Sunday 06 July 2025 12:03:38 -0400 (0:00:00.364) 0:07:34.634 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Sunday 06 July 2025 12:03:38 -0400 (0:00:00.484) 0:07:35.119 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Sunday 06 July 2025 12:03:39 -0400 (0:00:00.651) 0:07:35.771 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Sunday 06 July 2025 12:03:39 -0400 (0:00:00.440) 0:07:36.211 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Sunday 06 July 2025 12:03:40 -0400 (0:00:00.304) 0:07:36.516 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Sunday 06 July 2025 12:03:40 -0400 (0:00:00.538) 0:07:37.054 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Sunday 06 July 2025 12:03:41 -0400 (0:00:00.437) 0:07:37.491 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Sunday 06 July 2025 12:03:41 -0400 (0:00:00.503) 0:07:37.995 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Sunday 06 July 2025 12:03:42 -0400 (0:00:00.508) 0:07:38.503 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Sunday 06 July 2025 12:03:44 -0400 (0:00:02.001) 0:07:40.505 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Sunday 06 July 2025 12:03:45 -0400 (0:00:01.857) 0:07:42.362 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Sunday 06 July 2025 12:03:46 -0400 (0:00:00.435) 0:07:42.798 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Sunday 06 July 2025 12:03:46 -0400 (0:00:00.425) 0:07:43.223 *********** ok: [managed-node2] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Sunday 06 July 2025 12:03:48 -0400 (0:00:01.968) 0:07:45.192 *********** skipping: [managed-node2] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Sunday 06 July 2025 12:03:49 -0400 (0:00:00.458) 0:07:45.651 *********** skipping: [managed-node2] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Sunday 06 July 2025 12:03:49 -0400 (0:00:00.476) 0:07:46.128 *********** skipping: [managed-node2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Sunday 06 July 2025 12:03:50 -0400 (0:00:00.599) 0:07:46.727 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Sunday 06 July 2025 12:03:50 -0400 (0:00:00.357) 0:07:47.085 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Sunday 06 July 2025 12:03:51 -0400 (0:00:00.438) 0:07:47.523 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** 
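
The size checks in this stretch convert the requested size and the actual LV size to bytes before comparing them: the requested "5g" resolves to 5 * 1024^3 = 5368709120 bytes, and the 10 GiB pool device to 10737418240 bytes. A minimal sketch of the final comparison, assuming the built-in human_to_bytes filter and the storage_test_actual_size / storage_test_expected_size values shown in this log; the test itself builds these values with its own helper tasks:

    - name: Assert expected size is actual size (sketch)
      ansible.builtin.assert:
        that:
          # "5G" (the log's "5g") is interpreted 1024-based: 5 GiB = 5368709120 bytes
          - "'5G' | human_to_bytes == 5368709120"
          - storage_test_actual_size.bytes == storage_test_expected_size | int
        msg: "actual LV size does not match the requested 5 GiB"
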
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Sunday 06 July 2025 12:03:51 -0400 (0:00:00.460) 0:07:47.984 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Sunday 06 July 2025 12:03:51 -0400 (0:00:00.356) 0:07:48.341 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Sunday 06 July 2025 12:03:52 -0400 (0:00:00.344) 0:07:48.685 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Sunday 06 July 2025 12:03:52 -0400 (0:00:00.457) 0:07:49.142 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Sunday 06 July 2025 12:03:53 -0400 (0:00:00.555) 0:07:49.698 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Sunday 06 July 2025 12:03:53 -0400 (0:00:00.415) 0:07:50.114 *********** skipping: [managed-node2] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Sunday 06 July 2025 12:03:54 -0400 (0:00:00.479) 0:07:50.594 *********** skipping: [managed-node2] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Sunday 06 July 2025 12:03:54 -0400 (0:00:00.446) 0:07:51.040 *********** skipping: [managed-node2] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Sunday 06 July 2025 12:03:55 -0400 (0:00:00.432) 0:07:51.473 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Sunday 06 July 2025 12:03:55 -0400 (0:00:00.417) 0:07:51.891 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task 
path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Sunday 06 July 2025 12:03:56 -0400 (0:00:00.516) 0:07:52.407 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Sunday 06 July 2025 12:03:56 -0400 (0:00:00.871) 0:07:53.279 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Sunday 06 July 2025 12:03:57 -0400 (0:00:00.418) 0:07:53.697 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Sunday 06 July 2025 12:03:57 -0400 (0:00:00.561) 0:07:54.319 *********** ok: [managed-node2] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Sunday 06 July 2025 12:03:58 -0400 (0:00:00.503) 0:07:54.822 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Sunday 06 July 2025 12:03:58 -0400 (0:00:00.413) 0:07:55.236 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Sunday 06 July 2025 12:03:59 -0400 (0:00:00.598) 0:07:55.835 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.019543", "end": "2025-07-06 12:04:00.945498", "rc": 0, "start": "2025-07-06 12:04:00.925955" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Sunday 06 July 2025 12:04:01 -0400 (0:00:01.989) 0:07:57.824 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Sunday 06 July 2025 12:04:01 -0400 (0:00:00.433) 0:07:58.258 *********** ok: 
[managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Sunday 06 July 2025 12:04:02 -0400 (0:00:00.293) 0:07:58.551 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Sunday 06 July 2025 12:04:02 -0400 (0:00:00.537) 0:07:59.089 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Sunday 06 July 2025 12:04:03 -0400 (0:00:00.579) 0:07:59.669 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Sunday 06 July 2025 12:04:03 -0400 (0:00:00.431) 0:08:00.101 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Sunday 06 July 2025 12:04:04 -0400 (0:00:00.485) 0:08:00.586 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Sunday 06 July 2025 12:04:04 -0400 (0:00:00.542) 0:08:01.129 *********** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Sunday 06 July 2025 12:04:05 -0400 (0:00:00.407) 0:08:01.537 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Repeat the previous invocation to verify idempotence] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:84 Sunday 06 July 2025 12:04:05 -0400 (0:00:00.467) 0:08:02.005 *********** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Sunday 06 July 2025 12:04:06 -0400 (0:00:01.218) 0:08:03.223 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Sunday 06 July 2025 12:04:07 -0400 (0:00:00.601) 
0:08:03.825 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Sunday 06 July 2025 12:04:07 -0400 (0:00:00.574) 0:08:04.399 *********** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Sunday 06 July 2025 12:04:08 -0400 (0:00:00.917) 0:08:05.317 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Sunday 06 July 2025 12:04:09 -0400 (0:00:00.716) 0:08:06.033 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Sunday 06 July 2025 12:04:10 -0400 (0:00:00.493) 0:08:06.526 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Sunday 06 July 2025 12:04:10 -0400 (0:00:00.373) 0:08:06.900 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Sunday 06 July 2025 12:04:10 -0400 (0:00:00.501) 0:08:07.402 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Sunday 06 July 2025 12:04:11 -0400 (0:00:00.861) 0:08:08.263 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Sunday 06 July 2025 12:04:12 -0400 (0:00:00.501) 0:08:08.765 *********** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "xfs", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Sunday 06 July 2025 12:04:12 -0400 (0:00:00.608) 0:08:09.373 *********** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Sunday 06 July 2025 12:04:13 -0400 (0:00:00.507) 0:08:09.881 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Sunday 06 July 2025 12:04:13 -0400 (0:00:00.350) 0:08:10.231 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Sunday 06 July 2025 12:04:14 -0400 (0:00:00.444) 0:08:10.676 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Sunday 06 July 2025 12:04:14 -0400 (0:00:00.260) 0:08:10.936 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Sunday 06 July 2025 12:04:14 -0400 (0:00:00.195) 0:08:11.132 *********** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Sunday 06 July 2025 12:04:15 -0400 (0:00:00.607) 0:08:11.740 *********** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Sunday 06 July 2025 12:04:15 -0400 (0:00:00.432) 0:08:12.172 *********** ok: [managed-node2] => { "actions": [], "changed": false, "crypts": 
[], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Sunday 06 July 2025 12:04:21 -0400 (0:00:05.979) 0:08:18.152 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Sunday 06 July 2025 12:04:22 -0400 (0:00:00.353) 0:08:18.505 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817445.2429266, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "3fceedeef6c619b69ada96279531b69ed89734ba", "ctime": 1751817445.2399266, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264045, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751817445.2399266, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1279, "uid": 0, "version": "1023161980", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": 
false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Sunday 06 July 2025 12:04:23 -0400 (0:00:01.821) 0:08:20.327 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Sunday 06 July 2025 12:04:24 -0400 (0:00:00.494) 0:08:20.822 *********** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Sunday 06 July 2025 12:04:24 -0400 (0:00:00.393) 0:08:21.216 *********** ok: [managed-node2] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Sunday 06 July 2025 12:04:25 -0400 (0:00:00.573) 0:08:21.790 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Sunday 06 July 2025 12:04:26 -0400 (0:00:00.656) 0:08:22.446 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Sunday 06 July 2025 12:04:26 -0400 (0:00:00.384) 0:08:22.831 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Sunday 06 July 2025 12:04:26 -0400 (0:00:00.514) 0:08:23.346 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Sunday 06 July 2025 12:04:28 -0400 (0:00:01.762) 0:08:25.109 *********** ok: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Sunday 06 July 2025 12:04:30 -0400 (0:00:01.574) 0:08:26.683 *********** skipping: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'group': None, u'dump': 0, u'passno': 0, u'fstype': u'xfs', u'state': u'mounted', u'mode': None, u'owner': None, u'path': u'/opt/test1', u'opts': u'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Sunday 06 July 2025 12:04:30 -0400 (0:00:00.562) 0:08:27.246 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Sunday 06 July 2025 12:04:32 -0400 (0:00:01.840) 0:08:29.086 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817219.282967, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Sunday 06 July 2025 12:04:34 -0400 (0:00:01.798) 0:08:30.885 *********** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Sunday 06 July 2025 12:04:34 -0400 (0:00:00.302) 0:08:31.187 *********** ok: [managed-node2] TASK [Verify role results] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:97 Sunday 06 July 2025 12:04:38 -0400 (0:00:03.367) 0:08:34.554 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2 TASK [Print out pool information] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Sunday 06 July 2025 12:04:39 -0400 (0:00:00.849) 0:08:35.404 *********** ok: [managed-node2] => { "_storage_pools_list": [ { "disks": [ 
"sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Sunday 06 July 2025 12:04:39 -0400 (0:00:00.529) 0:08:35.933 *********** skipping: [managed-node2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Sunday 06 July 2025 12:04:39 -0400 (0:00:00.467) 0:08:36.400 *********** ok: [managed-node2] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "01cf4af9-d439-40f4-b908-4e9a581e4eed" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "vvI2TT-wUCo-l5pF-mSH6-RUks-12mX-gowx2Y" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Sunday 06 July 2025 12:04:41 -0400 (0:00:01.748) 0:08:38.149 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002820", "end": "2025-07-06 12:04:43.001873", "rc": 0, "start": "2025-07-06 12:04:42.999053" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs 
ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Sunday 06 July 2025 12:04:43 -0400 (0:00:01.787) 0:08:39.936 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002613", "end": "2025-07-06 12:04:44.953682", "failed_when_result": false, "rc": 0, "start": "2025-07-06 12:04:44.951069" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Sunday 06 July 2025 12:04:45 -0400 (0:00:01.850) 0:08:41.786 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Sunday 06 July 2025 12:04:46 -0400 (0:00:00.875) 0:08:42.662 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Sunday 06 July 2025 12:04:46 -0400 (0:00:00.383) 0:08:43.046 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.018694", "end": "2025-07-06 12:04:47.959153", "rc": 0, "start": "2025-07-06 12:04:47.940459" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Sunday 06 July 2025 12:04:48 -0400 (0:00:01.866) 0:08:44.912 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Sunday 06 July 2025 12:04:49 -0400 (0:00:01.093) 0:08:46.006 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Sunday 06 July 2025 12:04:50 -0400 (0:00:01.006) 0:08:47.013 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Sunday 06 July 2025 12:04:51 -0400 (0:00:00.491) 0:08:47.504 *********** ok: [managed-node2] => (item=/dev/sda) => { 
"ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Sunday 06 July 2025 12:04:53 -0400 (0:00:02.118) 0:08:49.623 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Sunday 06 July 2025 12:04:53 -0400 (0:00:00.498) 0:08:50.121 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Sunday 06 July 2025 12:04:54 -0400 (0:00:00.491) 0:08:50.613 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Sunday 06 July 2025 12:04:54 -0400 (0:00:00.580) 0:08:51.193 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Sunday 06 July 2025 12:04:55 -0400 (0:00:00.582) 0:08:51.776 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Sunday 06 July 2025 12:04:56 -0400 (0:00:00.718) 0:08:52.494 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Sunday 06 July 2025 12:04:56 -0400 (0:00:00.364) 0:08:52.859 *********** ok: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Sunday 06 July 2025 12:04:57 -0400 (0:00:00.668) 0:08:53.528 *********** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.15.253 closed. 
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Sunday 06 July 2025 12:04:58 -0400 (0:00:01.737) 0:08:55.265 *********** skipping: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Sunday 06 July 2025 12:04:59 -0400 (0:00:00.546) 0:08:55.812 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Sunday 06 July 2025 12:05:00 -0400 (0:00:01.076) 0:08:56.888 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Sunday 06 July 2025 12:05:00 -0400 (0:00:00.495) 0:08:57.384 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Sunday 06 July 2025 12:05:01 -0400 (0:00:00.507) 0:08:57.892 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Sunday 06 July 2025 12:05:02 -0400 (0:00:00.602) 0:08:58.494 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Sunday 06 July 2025 12:05:02 -0400 (0:00:00.380) 0:08:58.875 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Sunday 06 July 2025 12:05:03 -0400 (0:00:00.567) 0:08:59.442 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Sunday 06 July 2025 12:05:03 -0400 (0:00:00.444) 0:08:59.887 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Sunday 06 July 2025 12:05:04 -0400 (0:00:00.615) 0:09:00.502 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Sunday 06 July 2025 12:05:04 -0400 (0:00:00.483) 0:09:00.986 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Sunday 06 July 2025 12:05:04 -0400 (0:00:00.403) 0:09:01.390 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Sunday 06 July 2025 12:05:05 -0400 (0:00:00.514) 0:09:01.904 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Sunday 06 July 2025 12:05:05 -0400 (0:00:00.406) 0:09:02.311 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Sunday 06 July 2025 12:05:07 -0400 (0:00:01.110) 0:09:03.421 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Sunday 06 July 2025 12:05:08 -0400 (0:00:01.062) 0:09:04.483 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Sunday 06 July 2025 12:05:08 -0400 (0:00:00.412) 0:09:04.896 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Sunday 06 July 2025 12:05:08 -0400 (0:00:00.444) 0:09:05.340 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** 
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Sunday 06 July 2025 12:05:09 -0400 (0:00:00.327) 0:09:05.668 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Sunday 06 July 2025 12:05:09 -0400 (0:00:00.384) 0:09:06.053 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Sunday 06 July 2025 12:05:10 -0400 (0:00:00.490) 0:09:06.543 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Sunday 06 July 2025 12:05:10 -0400 (0:00:00.411) 0:09:06.955 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Sunday 06 July 2025 12:05:10 -0400 (0:00:00.348) 0:09:07.303 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Sunday 06 July 2025 12:05:12 -0400 (0:00:01.100) 0:09:08.404 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Sunday 06 July 2025 12:05:12 -0400 (0:00:00.975) 0:09:09.379 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Sunday 06 July 2025 12:05:13 -0400 (0:00:00.883) 0:09:10.263 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Sunday 06 July 2025 12:05:14 -0400 (0:00:00.376) 0:09:10.639 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Sunday 06 July 2025 12:05:14 -0400 (0:00:00.410) 0:09:11.049 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Sunday 06 July 2025 12:05:15 -0400 (0:00:00.426) 0:09:11.476 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Sunday 06 July 2025 12:05:16 -0400 (0:00:01.170) 0:09:12.646 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Sunday 06 July 2025 12:05:16 -0400 (0:00:00.464) 0:09:13.111 *********** skipping: [managed-node2] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Sunday 06 July 2025 12:05:17 -0400 (0:00:00.504) 0:09:13.615 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Sunday 06 July 2025 12:05:18 -0400 (0:00:01.020) 0:09:14.636 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Sunday 06 July 2025 12:05:18 -0400 (0:00:00.497) 0:09:15.133 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Sunday 06 July 2025 12:05:19 -0400 (0:00:00.655) 0:09:15.789 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Sunday 06 July 2025 12:05:19 -0400 (0:00:00.406) 0:09:16.195 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result 
was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Sunday 06 July 2025 12:05:20 -0400 (0:00:00.408) 0:09:16.604 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Sunday 06 July 2025 12:05:20 -0400 (0:00:00.400) 0:09:17.004 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Sunday 06 July 2025 12:05:20 -0400 (0:00:00.346) 0:09:17.351 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Sunday 06 July 2025 12:05:21 -0400 (0:00:00.484) 0:09:17.835 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Sunday 06 July 2025 12:05:22 -0400 (0:00:01.133) 0:09:18.968 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Sunday 06 July 2025 12:05:23 -0400 (0:00:01.005) 0:09:19.973 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Sunday 06 July 2025 12:05:24 -0400 (0:00:00.440) 0:09:20.414 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Sunday 06 July 2025 12:05:24 -0400 (0:00:00.512) 0:09:20.926 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Sunday 06 July 2025 12:05:25 -0400 (0:00:00.549) 0:09:21.476 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO 
deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Sunday 06 July 2025 12:05:25 -0400 (0:00:00.565) 0:09:22.042 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Sunday 06 July 2025 12:05:26 -0400 (0:00:00.527) 0:09:22.569 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Sunday 06 July 2025 12:05:26 -0400 (0:00:00.571) 0:09:23.196 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Sunday 06 July 2025 12:05:27 -0400 (0:00:00.391) 0:09:23.587 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Sunday 06 July 2025 12:05:28 -0400 (0:00:01.199) 0:09:24.787 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Sunday 06 July 2025 12:05:28 -0400 (0:00:00.461) 0:09:25.248 *********** skipping: [managed-node2] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Sunday 06 July 2025 12:05:29 -0400 (0:00:00.487) 0:09:25.736 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Sunday 06 July 2025 12:05:29 -0400 (0:00:00.518) 0:09:26.254 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Sunday 06 July 2025 12:05:30 -0400 (0:00:00.432) 0:09:26.687 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Sunday 06 July 2025 12:05:30 -0400 (0:00:00.525) 
0:09:27.213 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Sunday 06 July 2025 12:05:31 -0400 (0:00:00.487) 0:09:27.701 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Sunday 06 July 2025 12:05:31 -0400 (0:00:00.394) 0:09:28.095 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Sunday 06 July 2025 12:05:32 -0400 (0:00:00.517) 0:09:28.612 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Sunday 06 July 2025 12:05:33 -0400 (0:00:00.912) 0:09:29.525 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Sunday 06 July 2025 12:05:33 -0400 (0:00:00.531) 0:09:30.057 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Sunday 06 July 2025 12:05:35 -0400 (0:00:02.171) 0:09:32.228 *********** ok: 
[managed-node2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Sunday 06 July 2025 12:05:36 -0400 (0:00:00.396) 0:09:32.625 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Sunday 06 July 2025 12:05:36 -0400 (0:00:00.452) 0:09:33.078 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Sunday 06 July 2025 12:05:37 -0400 (0:00:00.540) 0:09:33.618 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Sunday 06 July 2025 12:05:38 -0400 (0:00:01.051) 0:09:34.670 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Sunday 06 July 2025 12:05:38 -0400 (0:00:00.421) 0:09:35.092 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Sunday 06 July 2025 12:05:39 -0400 (0:00:00.444) 0:09:35.537 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Sunday 06 July 2025 12:05:39 -0400 (0:00:00.619) 0:09:36.156 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Sunday 06 July 2025 12:05:40 -0400 (0:00:00.348) 0:09:36.505 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Sunday 06 July 2025 12:05:40 -0400 (0:00:00.557) 0:09:37.062 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] 
************************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Sunday 06 July 2025 12:05:41 -0400 (0:00:00.383) 0:09:37.445 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Sunday 06 July 2025 12:05:41 -0400 (0:00:00.510) 0:09:37.955 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Sunday 06 July 2025 12:05:42 -0400 (0:00:00.655) 0:09:38.611 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Sunday 06 July 2025 12:05:42 -0400 (0:00:00.563) 0:09:39.174 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Sunday 06 July 2025 12:05:43 -0400 (0:00:00.517) 0:09:39.692 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Sunday 06 July 2025 12:05:43 -0400 (0:00:00.525) 0:09:40.217 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Sunday 06 July 2025 12:05:44 -0400 (0:00:00.480) 0:09:40.698 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Sunday 06 July 2025 12:05:44 -0400 (0:00:00.573) 0:09:41.271 
*********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Sunday 06 July 2025 12:05:45 -0400 (0:00:00.612) 0:09:41.884 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Sunday 06 July 2025 12:05:46 -0400 (0:00:00.678) 0:09:42.563 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817428.7509246, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1751817428.7509246, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 39930, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1751817428.7509246, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Sunday 06 July 2025 12:05:48 -0400 (0:00:01.958) 0:09:44.522 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Sunday 06 July 2025 12:05:48 -0400 (0:00:00.515) 0:09:45.037 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Sunday 06 July 2025 12:05:49 -0400 (0:00:00.568) 0:09:45.606 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Sunday 06 July 2025 12:05:49 -0400 (0:00:00.554) 0:09:46.160 *********** ok: [managed-node2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Sunday 06 July 2025 12:05:50 -0400 (0:00:00.447) 0:09:46.607 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Sunday 06 July 
2025 12:05:50 -0400 (0:00:00.488) 0:09:47.096 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Sunday 06 July 2025 12:05:51 -0400 (0:00:00.525) 0:09:47.622 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Sunday 06 July 2025 12:05:51 -0400 (0:00:00.530) 0:09:48.152 *********** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Sunday 06 July 2025 12:05:54 -0400 (0:00:03.212) 0:09:51.365 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Sunday 06 July 2025 12:05:55 -0400 (0:00:00.554) 0:09:51.920 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Sunday 06 July 2025 12:05:56 -0400 (0:00:00.482) 0:09:52.403 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Sunday 06 July 2025 12:05:56 -0400 (0:00:00.539) 0:09:52.943 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Sunday 06 July 2025 12:05:57 -0400 (0:00:00.568) 0:09:53.512 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Sunday 06 July 2025 12:05:57 -0400 (0:00:00.428) 0:09:53.940 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Sunday 06 July 2025 12:05:57 -0400 (0:00:00.454) 0:09:54.394 *********** skipping: [managed-node2] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Sunday 06 July 2025 12:05:58 -0400 (0:00:00.416) 0:09:54.811 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Sunday 06 July 2025 12:05:58 -0400 (0:00:00.390) 0:09:55.201 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Sunday 06 July 2025 12:05:59 -0400 (0:00:00.514) 0:09:55.715 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Sunday 06 July 2025 12:05:59 -0400 (0:00:00.648) 0:09:56.364 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Sunday 06 July 2025 12:06:00 -0400 (0:00:00.399) 0:09:56.763 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Sunday 06 July 2025 12:06:00 -0400 (0:00:00.545) 0:09:57.308 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Sunday 06 July 2025 12:06:01 -0400 (0:00:00.398) 0:09:57.707 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Sunday 06 July 2025 12:06:01 -0400 (0:00:00.438) 0:09:58.145 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Sunday 06 July 2025 12:06:02 -0400 (0:00:00.452) 0:09:58.598 *********** skipping: 
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Sunday 06 July 2025 12:06:02 -0400 (0:00:00.514) 0:09:59.112 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Sunday 06 July 2025 12:06:03 -0400 (0:00:00.495) 0:09:59.608 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Sunday 06 July 2025 12:06:03 -0400 (0:00:00.444) 0:10:00.052 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Sunday 06 July 2025 12:06:03 -0400 (0:00:00.336) 0:10:00.389 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Sunday 06 July 2025 12:06:04 -0400 (0:00:00.498) 0:10:00.888 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Sunday 06 July 2025 12:06:04 -0400 (0:00:00.414) 0:10:01.303 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Sunday 06 July 2025 12:06:05 -0400 (0:00:00.535) 0:10:01.838 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Sunday 06 July 2025 12:06:05 -0400 (0:00:00.530) 0:10:02.226 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Sunday 06 July 2025 12:06:06 -0400 (0:00:00.530) 0:10:02.757 ***********
ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" }
TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Sunday 06 July 2025 12:06:08 -0400 (0:00:01.989) 0:10:04.747 ***********
ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" }
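Note on the two parse steps above: both resolve the volume's "5g" / "5 GiB" specification to 5368709120 bytes. A minimal Python sketch of that conversion, for reference only (it is not the module the test itself calls, and it assumes binary units throughout):

    UNITS = {"b": 1, "k": 1024, "m": 1024**2, "g": 1024**3, "t": 1024**4}

    def to_bytes(size):
        # Accepts the forms seen above: "5g", "5GiB", "5 GiB".
        s = size.strip().lower().replace("ib", "").replace(" ", "")
        num = s.rstrip("bkmgt")
        return int(float(num) * UNITS[s[len(num):] or "b"])

    assert to_bytes("5g") == to_bytes("5 GiB") == 5368709120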
TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Sunday 06 July 2025 12:06:10 -0400 (0:00:01.769) 0:10:06.516 ***********
ok: [managed-node2] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false }
TASK [Show expected size] ******************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Sunday 06 July 2025 12:06:10 -0400 (0:00:00.368) 0:10:06.885 ***********
ok: [managed-node2] => { "storage_test_expected_size": "5368709120" }
TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Sunday 06 July 2025 12:06:11 -0400 (0:00:01.009) 0:10:07.894 ***********
ok: [managed-node2] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" }
TASK [Show test pool] **********************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Sunday 06 July 2025 12:06:13 -0400 (0:00:01.726) 0:10:09.621 ***********
skipping: [managed-node2] => {}
TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Sunday 06 July 2025 12:06:13 -0400 (0:00:00.376) 0:10:09.998 ***********
skipping: [managed-node2] => {}
TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Sunday 06 July 2025 12:06:13 -0400 (0:00:00.360) 0:10:10.358 ***********
skipping: [managed-node2] => {}
TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Sunday 06 July 2025 12:06:14 -0400 (0:00:00.451) 0:10:10.810 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68
Sunday 06 July 2025 12:06:14 -0400 (0:00:00.484) 0:10:11.294 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72
Sunday 06 July 2025 12:06:15 -0400 (0:00:00.518) 0:10:11.813 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Sunday 06 July 2025 12:06:15 -0400 (0:00:00.432) 0:10:12.245 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Sunday 06 July 2025 12:06:16 -0400 (0:00:00.368) 0:10:12.614 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Sunday 06 July 2025 12:06:16 -0400 (0:00:00.436) 0:10:13.051 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Sunday 06 July 2025 12:06:16 -0400 (0:00:00.333) 0:10:13.384 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Sunday 06 July 2025 12:06:17 -0400 (0:00:00.403) 0:10:13.788 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Sunday 06 July 2025 12:06:17 -0400 (0:00:00.437) 0:10:14.225 *********** skipping: [managed-node2] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Sunday 06 July 2025 12:06:18 -0400 (0:00:00.406) 0:10:14.632 *********** skipping: [managed-node2] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Sunday 06 July 2025 12:06:18 -0400 (0:00:00.511) 0:10:15.143 *********** skipping: [managed-node2] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Sunday 06 July 2025 12:06:19 -0400 (0:00:00.407) 0:10:15.551 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Sunday 06 July 2025 12:06:19 -0400 (0:00:00.215) 0:10:15.766 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task 
path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128
Sunday 06 July 2025 12:06:19 -0400 (0:00:00.340) 0:10:16.107 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132
Sunday 06 July 2025 12:06:20 -0400 (0:00:00.317) 0:10:16.425 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138
Sunday 06 July 2025 12:06:20 -0400 (0:00:00.462) 0:10:16.887 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Show actual size] ********************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144
Sunday 06 July 2025 12:06:21 -0400 (0:00:00.549) 0:10:17.437 ***********
ok: [managed-node2] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } }
TASK [Show expected size] ******************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148
Sunday 06 July 2025 12:06:21 -0400 (0:00:00.259) 0:10:17.696 ***********
ok: [managed-node2] => { "storage_test_expected_size": "5368709120" }
TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152
Sunday 06 July 2025 12:06:21 -0400 (0:00:00.534) 0:10:18.231 ***********
ok: [managed-node2] => { "changed": false }
MSG: All assertions passed
TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Sunday 06 July 2025 12:06:22 -0400 (0:00:00.439) 0:10:18.671 ***********
ok: [managed-node2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.017602", "end": "2025-07-06 12:06:23.623025", "rc": 0, "start": "2025-07-06 12:06:23.605423" }
STDOUT:
LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Sunday 06 July 2025 12:06:23 -0400 (0:00:01.670) 0:10:20.342 ***********
ok: [managed-node2] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false }
TASK [Check segment type] ******************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Sunday 06 July 2025 12:06:24 -0400 (0:00:00.352) 0:10:20.695 ***********
ok: [managed-node2] => { "changed": false }
MSG: All assertions passed
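The lvs call above uses --nameprefixes, --nosuffix and --unquoted, so its single line of output is a list of KEY=VALUE fields. An illustrative Python sketch, not the test's own code, of parsing that line and making the same segment-type check:

    # The exact STDOUT line from the task above.
    line = ("LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- "
            "LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear")

    fields = dict(pair.split("=", 1) for pair in line.split())
    assert fields["LVM2_SEGTYPE"] == "linear"        # volume is not cached
    assert fields["LVM2_CACHE_TOTAL_BLOCKS"] == ""   # no cache blocks reported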
TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Sunday 06 July 2025 12:06:24 -0400 (0:00:00.591) 0:10:21.287 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Sunday 06 July 2025 12:06:25 -0400 (0:00:00.410) 0:10:21.697 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Sunday 06 July 2025 12:06:25 -0400 (0:00:00.362) 0:10:22.060 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Check cache size] ********************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Sunday 06 July 2025 12:06:26 -0400 (0:00:00.429) 0:10:22.489 ***********
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" }
TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Sunday 06 July 2025 12:06:26 -0400 (0:00:00.362) 0:10:22.852 ***********
ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false }
TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43
Sunday 06 July 2025 12:06:26 -0400 (0:00:00.375) 0:10:23.227 ***********
TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52
Sunday 06 July 2025 12:06:27 -0400 (0:00:00.462) 0:10:23.690 ***********
ok: [managed-node2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false }
TASK [Remove the FS] ***********************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:100
Sunday 06 July 2025 12:06:27 -0400 (0:00:00.468) 0:10:24.158 ***********
TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Sunday 06 July 2025 12:06:28 -0400 (0:00:01.082) 0:10:25.240 ***********
included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2
TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Sunday 06 July 2025 12:06:29 -0400 (0:00:00.804)
0:10:26.044 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Sunday 06 July 2025 12:06:29 -0400 (0:00:00.312) 0:10:26.357 *********** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Sunday 06 July 2025 12:06:30 -0400 (0:00:00.820) 0:10:27.178 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Sunday 06 July 2025 12:06:31 -0400 (0:00:00.375) 0:10:27.553 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Sunday 06 July 2025 12:06:31 -0400 (0:00:00.346) 0:10:27.900 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Sunday 06 July 2025 12:06:31 -0400 (0:00:00.499) 0:10:28.400 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Sunday 06 July 2025 12:06:32 -0400 (0:00:00.386) 0:10:28.786 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Sunday 06 July 2025 12:06:33 -0400 (0:00:01.141) 0:10:29.927 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Sunday 06 July 2025 12:06:34 -0400 (0:00:01.035) 0:10:30.963 *********** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "fs_type": "unformatted", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Sunday 06 July 2025 12:06:35 -0400 (0:00:00.624) 0:10:31.587 *********** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Sunday 06 July 2025 12:06:35 -0400 (0:00:00.564) 0:10:32.152 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Sunday 06 July 2025 12:06:36 -0400 (0:00:00.344) 0:10:32.496 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Sunday 06 July 2025 12:06:36 -0400 (0:00:00.628) 0:10:33.125 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Sunday 06 July 2025 12:06:37 -0400 (0:00:00.484) 0:10:33.610 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Sunday 06 July 2025 12:06:37 -0400 (0:00:00.530) 0:10:34.140 *********** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Sunday 06 July 2025 12:06:38 -0400 (0:00:00.626) 0:10:34.767 *********** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Sunday 06 July 2025 12:06:38 -0400 (0:00:00.519) 0:10:35.287 *********** changed: [managed-node2] => { "actions": [ { "action": "destroy format", "device": 
"/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, { "path": "/opt/test1", "state": "absent" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "unformatted", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Sunday 06 July 2025 12:06:45 -0400 (0:00:06.558) 0:10:41.845 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Sunday 06 July 2025 12:06:45 -0400 (0:00:00.399) 0:10:42.245 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817445.2429266, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "3fceedeef6c619b69ada96279531b69ed89734ba", "ctime": 1751817445.2399266, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264045, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751817445.2399266, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1279, "uid": 0, "version": "1023161980", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": 
false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Sunday 06 July 2025 12:06:47 -0400 (0:00:01.829) 0:10:44.074 *********** ok: [managed-node2] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Sunday 06 July 2025 12:06:49 -0400 (0:00:01.938) 0:10:46.012 *********** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Sunday 06 July 2025 12:06:50 -0400 (0:00:00.443) 0:10:46.456 *********** ok: [managed-node2] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, { "path": "/opt/test1", "state": "absent" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "unformatted", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Sunday 06 July 2025 12:06:50 -0400 (0:00:00.733) 0:10:47.190 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": 
null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "unformatted", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Sunday 06 July 2025 12:06:51 -0400 (0:00:00.534) 0:10:47.725 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Sunday 06 July 2025 12:06:51 -0400 (0:00:00.536) 0:10:48.261 *********** changed: [managed-node2] => (item={u'src': u'/dev/mapper/foo-test1', u'state': u'absent', u'fstype': u'xfs', u'path': u'/opt/test1'}) => { "ansible_loop_var": "mount_info", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } ok: [managed-node2] => (item={u'path': u'/opt/test1', u'state': u'absent'}) => { "ansible_loop_var": "mount_info", "changed": false, "dump": "0", "fstab": "/etc/fstab", "mount_info": { "path": "/opt/test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Sunday 06 July 2025 12:06:55 -0400 (0:00:03.280) 0:10:51.542 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Sunday 06 July 2025 12:06:57 -0400 (0:00:01.957) 0:10:53.500 *********** TASK 
[fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Sunday 06 July 2025 12:06:57 -0400 (0:00:00.468) 0:10:53.968 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Sunday 06 July 2025 12:06:57 -0400 (0:00:00.338) 0:10:54.307 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Sunday 06 July 2025 12:06:59 -0400 (0:00:02.073) 0:10:56.380 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817219.282967, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Sunday 06 July 2025 12:07:02 -0400 (0:00:02.159) 0:10:58.540 *********** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Sunday 06 July 2025 12:07:02 -0400 (0:00:00.385) 0:10:58.925 *********** ok: [managed-node2] TASK [Verify role results] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:112 Sunday 06 July 2025 12:07:05 -0400 (0:00:02.765) 0:11:01.690 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2 TASK [Print out pool information] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Sunday 06 July 2025 12:07:06 -0400 (0:00:01.684) 0:11:03.375 *********** ok: [managed-node2] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": 
false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "unformatted", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Sunday 06 July 2025 12:07:07 -0400 (0:00:00.485) 0:11:03.861 *********** skipping: [managed-node2] => {} TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Sunday 06 July 2025 12:07:07 -0400 (0:00:00.443) 0:11:04.305 *********** ok: [managed-node2] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/foo-test1", "size": "5G", "type": "lvm", "uuid": "" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "vvI2TT-wUCo-l5pF-mSH6-RUks-12mX-gowx2Y" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Sunday 06 July 2025 12:07:09 -0400 (0:00:01.844) 0:11:06.149 ***********
ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002894", "end": "2025-07-06 12:07:11.073171", "rc": 0, "start": "2025-07-06 12:07:11.070277" }
STDOUT:
# system_role:storage
#
# /etc/fstab
# Created by anaconda on Thu Jun 20 10:23:46 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk'
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info
#
UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Sunday 06 July 2025 12:07:11 -0400 (0:00:01.779) 0:11:07.929 ***********
ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003368", "end": "2025-07-06 12:07:12.976393", "failed_when_result": false, "rc": 0, "start": "2025-07-06 12:07:12.973025" }
TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Sunday 06 July 2025 12:07:13 -0400 (0:00:01.836) 0:11:09.765 ***********
included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2
TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Sunday 06 July 2025 12:07:14 -0400 (0:00:00.957) 0:11:10.723 ***********
ok: [managed-node2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false }
TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Sunday 06 July 2025 12:07:14 -0400 (0:00:00.411) 0:11:11.135 ***********
ok: [managed-node2] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.020769", "end": "2025-07-06 12:07:16.182057", "rc": 0, "start": "2025-07-06 12:07:16.161288" }
STDOUT:
0
TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Sunday 06 July 2025 12:07:16 -0400 (0:00:01.930) 0:11:13.065 ***********
ok: [managed-node2] => { "changed": false }
MSG: All assertions passed
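The shared-VG probe above relies on vgs --binary, which prints the shared attribute as "0" or "1"; the "0" returned for VG "foo" is consistent with the pool's "shared": false setting. A hedged Python sketch of the same check (illustrative only; vg_is_shared is a hypothetical helper, not part of the role or its tests):

    import subprocess

    def vg_is_shared(vg_name):
        # --binary makes vgs report the "shared" attribute as "0" or "1".
        out = subprocess.check_output(
            ["vgs", "--noheadings", "--binary", "-o", "shared", vg_name],
            universal_newlines=True)
        return out.strip() == "1"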
TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Sunday 06 July 2025 12:07:17 -0400 (0:00:00.496) 0:11:13.562 ***********
included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2
included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2
TASK [Set test variables] ******************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Sunday 06 July 2025 12:07:18 -0400 (0:00:01.155) 0:11:14.717 ***********
ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false }
TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Sunday 06 July 2025 12:07:18 -0400 (0:00:00.533) 0:11:15.251 ***********
ok: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" }
TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Sunday 06 July 2025 12:07:20 -0400 (0:00:01.967) 0:11:17.218 ***********
ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false }
TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Sunday 06 July 2025 12:07:21 -0400 (0:00:00.480) 0:11:17.699 ***********
ok: [managed-node2] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false }
TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Sunday 06 July 2025 12:07:21 -0400 (0:00:00.627) 0:11:18.326 ***********
ok: [managed-node2] => { "changed": false }
MSG: All assertions passed
TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Sunday 06 July 2025 12:07:22 -0400 (0:00:00.439) 0:11:18.766 ***********
ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false }
TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Sunday 06 July 2025 12:07:22 -0400 (0:00:00.549) 0:11:19.316 ***********
ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false }
TASK [Set expected pv type] ****************************************************
task path:
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Sunday 06 July 2025 12:07:23 -0400 (0:00:00.497) 0:11:19.814 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Sunday 06 July 2025 12:07:23 -0400 (0:00:00.480) 0:11:20.294 *********** ok: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Sunday 06 July 2025 12:07:24 -0400 (0:00:00.614) 0:11:20.909 *********** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.15.253 closed. MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Sunday 06 July 2025 12:07:26 -0400 (0:00:01.663) 0:11:22.572 *********** skipping: [managed-node2] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Sunday 06 July 2025 12:07:26 -0400 (0:00:00.591) 0:11:23.164 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Sunday 06 July 2025 12:07:27 -0400 (0:00:00.892) 0:11:24.056 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Sunday 06 July 2025 12:07:27 -0400 (0:00:00.310) 0:11:24.367 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Sunday 06 July 2025 12:07:28 -0400 (0:00:00.411) 0:11:24.779 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Sunday 06 July 2025 12:07:28 -0400 (0:00:00.499) 0:11:25.278 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Sunday 06 July 2025 12:07:29 -0400 (0:00:00.609) 0:11:25.888 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Sunday 06 July 2025 12:07:29 -0400 (0:00:00.411) 0:11:26.299 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Sunday 06 July 2025 12:07:30 -0400 (0:00:00.396) 0:11:26.695 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Sunday 06 July 2025 12:07:30 -0400 (0:00:00.381) 0:11:27.077 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Sunday 06 July 2025 12:07:31 -0400 (0:00:00.479) 0:11:27.556 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Sunday 06 July 2025 12:07:31 -0400 (0:00:00.543) 0:11:28.100 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Sunday 06 July 2025 12:07:32 -0400 (0:00:01.100) 0:11:29.201 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Sunday 06 July 2025 12:07:33 -0400 (0:00:00.434) 0:11:29.636 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Sunday 06 July 2025 12:07:34 -0400 (0:00:01.009) 0:11:30.645 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 TASK [Get information about the LV] ******************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Sunday 06 July 2025 12:07:35 -0400 (0:00:00.881) 0:11:31.527 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Sunday 06 July 2025 12:07:35 -0400 (0:00:00.582) 0:11:32.110 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Sunday 06 July 2025 12:07:36 -0400 (0:00:00.413) 0:11:32.523 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Sunday 06 July 2025 12:07:36 -0400 (0:00:00.499) 0:11:33.023 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Sunday 06 July 2025 12:07:37 -0400 (0:00:00.379) 0:11:33.403 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Sunday 06 July 2025 12:07:37 -0400 (0:00:00.500) 0:11:33.903 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Sunday 06 July 2025 12:07:37 -0400 (0:00:00.291) 0:11:34.195 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Sunday 06 July 2025 12:07:38 -0400 (0:00:00.492) 0:11:34.687 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Sunday 06 July 2025 12:07:39 -0400 (0:00:01.001) 0:11:35.689 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 TASK [Get information about thinpool] ****************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Sunday 06 July 2025 12:07:40 -0400 (0:00:01.009) 0:11:36.699 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Sunday 06 July 2025 12:07:40 -0400 (0:00:00.591) 0:11:37.291 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Sunday 06 July 2025 12:07:41 -0400 (0:00:00.457) 0:11:37.749 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Sunday 06 July 2025 12:07:41 -0400 (0:00:00.348) 0:11:38.097 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Sunday 06 July 2025 12:07:42 -0400 (0:00:00.414) 0:11:38.511 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Sunday 06 July 2025 12:07:43 -0400 (0:00:01.089) 0:11:39.601 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Sunday 06 July 2025 12:07:43 -0400 (0:00:00.536) 0:11:40.138 *********** skipping: [managed-node2] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Sunday 06 July 2025 12:07:44 -0400 (0:00:00.544) 0:11:40.682 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 TASK [Set variables used by tests] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2 Sunday 06 July 2025 12:07:45 -0400 (0:00:01.065) 0:11:41.748 *********** ok: [managed-node2] => { "ansible_facts": { 
"_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6 Sunday 06 July 2025 12:07:45 -0400 (0:00:00.572) 0:11:42.320 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14 Sunday 06 July 2025 12:07:46 -0400 (0:00:00.678) 0:11:42.999 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23 Sunday 06 July 2025 12:07:47 -0400 (0:00:00.554) 0:11:43.553 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32 Sunday 06 July 2025 12:07:47 -0400 (0:00:00.427) 0:11:43.981 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41 Sunday 06 July 2025 12:07:48 -0400 (0:00:00.507) 0:11:44.488 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Sunday 06 July 2025 12:07:48 -0400 (0:00:00.486) 0:11:44.975 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Sunday 06 July 2025 12:07:49 -0400 (0:00:00.407) 0:11:45.483 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Sunday 06 July 2025 12:07:50 -0400 (0:00:00.993) 0:11:46.477 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Sunday 06 July 2025 12:07:51 -0400 (0:00:00.946) 0:11:47.424 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was 
False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Sunday 06 July 2025 12:07:51 -0400 (0:00:00.397) 0:11:47.822 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Sunday 06 July 2025 12:07:51 -0400 (0:00:00.291) 0:11:48.113 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Sunday 06 July 2025 12:07:52 -0400 (0:00:00.371) 0:11:48.485 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Sunday 06 July 2025 12:07:52 -0400 (0:00:00.450) 0:11:48.935 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Sunday 06 July 2025 12:07:52 -0400 (0:00:00.313) 0:11:49.249 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Sunday 06 July 2025 12:07:53 -0400 (0:00:00.408) 0:11:49.658 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Sunday 06 July 2025 12:07:53 -0400 (0:00:00.265) 0:11:49.924 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Sunday 06 July 2025 12:07:54 -0400 (0:00:01.248) 0:11:51.172 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Sunday 06 July 2025 12:07:55 -0400 (0:00:00.980) 0:11:52.153 *********** skipping: [managed-node2] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Sunday 06 July 2025 
12:07:56 -0400 (0:00:00.427) 0:11:52.581 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Sunday 06 July 2025 12:07:56 -0400 (0:00:00.548) 0:11:53.129 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Sunday 06 July 2025 12:07:57 -0400 (0:00:00.434) 0:11:53.563 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Sunday 06 July 2025 12:07:57 -0400 (0:00:00.406) 0:11:53.970 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Sunday 06 July 2025 12:07:58 -0400 (0:00:00.497) 0:11:54.468 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Sunday 06 July 2025 12:07:58 -0400 (0:00:00.465) 0:11:54.934 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Sunday 06 July 2025 12:07:58 -0400 (0:00:00.432) 0:11:55.367 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Sunday 06 July 2025 12:07:59 -0400 (0:00:00.857) 0:11:56.224 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Sunday 06 July 2025 12:08:00 -0400 (0:00:00.518) 0:11:56.742 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 included: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Sunday 06 July 2025 12:08:02 -0400 (0:00:02.195) 0:11:58.937 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Sunday 06 July 2025 12:08:02 -0400 (0:00:00.439) 0:11:59.377 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Sunday 06 July 2025 12:08:03 -0400 (0:00:00.497) 0:11:59.874 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Sunday 06 July 2025 12:08:03 -0400 (0:00:00.480) 0:12:00.355 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Sunday 06 July 2025 12:08:04 -0400 (0:00:00.520) 0:12:00.875 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Sunday 06 July 2025 12:08:04 -0400 (0:00:00.336) 0:12:01.212 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Sunday 06 July 2025 12:08:05 -0400 (0:00:00.289) 0:12:01.502 *********** skipping: 
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Sunday 06 July 2025 12:08:05 -0400 (0:00:00.304) 0:12:01.806 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Sunday 06 July 2025 12:08:05 -0400 (0:00:00.322) 0:12:02.129 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Sunday 06 July 2025 12:08:06 -0400 (0:00:00.466) 0:12:02.596 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Sunday 06 July 2025 12:08:06 -0400 (0:00:00.304) 0:12:02.900 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Sunday 06 July 2025 12:08:06 -0400 (0:00:00.358) 0:12:03.258 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Sunday 06 July 2025 12:08:07 -0400 (0:00:00.501) 0:12:03.760 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Sunday 06 July 2025 12:08:07 -0400 (0:00:00.410) 0:12:04.170 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Sunday 06 July 2025 12:08:08 -0400 (0:00:00.514) 0:12:04.685 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Sunday 06 July 2025 12:08:08 -0400 (0:00:00.183) 0:12:04.869 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Sunday 06 July 2025 12:08:08 -0400 (0:00:00.145) 0:12:05.015 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Sunday 06 July 2025 12:08:09 -0400 (0:00:00.444) 0:12:05.459 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Sunday 06 July 2025 12:08:09 -0400 (0:00:00.621) 0:12:06.081 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Sunday 06 July 2025 12:08:10 -0400 (0:00:00.561) 0:12:06.643 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751818004.7860436, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1751818004.7860436, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 39930, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1751818004.7860436, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Sunday 06 July 2025 12:08:12 -0400 (0:00:01.793) 0:12:08.436 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Sunday 06 July 2025 12:08:12 -0400 (0:00:00.533) 0:12:08.970 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Sunday 06 July 2025 12:08:13 -0400 (0:00:00.493) 0:12:09.464 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Sunday 06 July 2025 12:08:13 -0400 (0:00:00.607) 0:12:10.071 *********** ok: [managed-node2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Sunday 06 July 2025 12:08:14 -0400 (0:00:00.485) 0:12:10.557 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Sunday 06 July 2025 12:08:14 -0400 (0:00:00.361) 0:12:10.918 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Sunday 06 July 2025 12:08:15 -0400 (0:00:00.609) 0:12:11.528 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Sunday 06 July 2025 12:08:15 -0400 (0:00:00.367) 0:12:11.895 *********** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Sunday 06 July 2025 12:08:18 -0400 (0:00:02.769) 0:12:14.664 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Sunday 06 July 2025 12:08:18 -0400 (0:00:00.433) 0:12:15.097 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Sunday 06 July 2025 12:08:19 -0400 (0:00:00.394) 0:12:15.492 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Sunday 06 July 2025 12:08:19 -0400 (0:00:00.554) 
0:12:16.047 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Sunday 06 July 2025 12:08:20 -0400 (0:00:00.525) 0:12:16.572 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Sunday 06 July 2025 12:08:20 -0400 (0:00:00.442) 0:12:17.015 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Sunday 06 July 2025 12:08:21 -0400 (0:00:00.495) 0:12:17.510 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Sunday 06 July 2025 12:08:21 -0400 (0:00:00.510) 0:12:18.020 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Sunday 06 July 2025 12:08:21 -0400 (0:00:00.371) 0:12:18.392 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Sunday 06 July 2025 12:08:23 -0400 (0:00:01.158) 0:12:19.550 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Sunday 06 July 2025 12:08:23 -0400 (0:00:00.586) 0:12:20.137 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Sunday 06 July 2025 12:08:24 -0400 (0:00:00.396) 0:12:20.533 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Sunday 06 July 2025 12:08:24 -0400 (0:00:00.490) 0:12:21.023 *********** skipping: [managed-node2] => { 
"changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Sunday 06 July 2025 12:08:25 -0400 (0:00:00.565) 0:12:21.589 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Sunday 06 July 2025 12:08:25 -0400 (0:00:00.311) 0:12:21.900 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Sunday 06 July 2025 12:08:25 -0400 (0:00:00.425) 0:12:22.326 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Sunday 06 July 2025 12:08:26 -0400 (0:00:00.448) 0:12:22.774 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Sunday 06 July 2025 12:08:26 -0400 (0:00:00.331) 0:12:23.105 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Sunday 06 July 2025 12:08:27 -0400 (0:00:00.357) 0:12:23.463 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Sunday 06 July 2025 12:08:27 -0400 (0:00:00.444) 0:12:23.908 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Sunday 06 July 2025 12:08:28 -0400 (0:00:00.565) 0:12:24.474 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Sunday 06 July 2025 12:08:28 -0400 (0:00:00.455) 0:12:24.929 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata 
version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Sunday 06 July 2025 12:08:28 -0400 (0:00:00.462) 0:12:25.391 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Sunday 06 July 2025 12:08:29 -0400 (0:00:00.389) 0:12:25.781 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Sunday 06 July 2025 12:08:29 -0400 (0:00:00.499) 0:12:26.281 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Sunday 06 July 2025 12:08:31 -0400 (0:00:01.985) 0:12:28.266 *********** ok: [managed-node2] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Sunday 06 July 2025 12:08:33 -0400 (0:00:01.744) 0:12:30.010 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_expected_size": "5368709120" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Sunday 06 July 2025 12:08:34 -0400 (0:00:00.669) 0:12:30.680 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Sunday 06 July 2025 12:08:34 -0400 (0:00:00.526) 0:12:31.206 *********** ok: [managed-node2] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Sunday 06 July 2025 12:08:36 -0400 (0:00:02.076) 0:12:33.283 *********** skipping: [managed-node2] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Sunday 06 July 2025 12:08:37 -0400 (0:00:00.586) 0:12:33.870 *********** skipping: [managed-node2] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Sunday 06 July 2025 12:08:37 -0400 (0:00:00.493) 0:12:34.363 *********** skipping: [managed-node2] => {} 
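
[Editor's note] The size-verification steps above reduce to binary-unit arithmetic: the requested "5g" / "5GiB" volume is 5 * 1024^3 = 5368709120 bytes and the parent pool "10g" is 10737418240 bytes, which is why the expected and actual sizes agree later in the run. Below is a minimal, self-contained Python sketch of that conversion; to_bytes is a hypothetical helper written only for illustration and is not part of the storage role or its test suite, and the numeric values are taken directly from the log output above.

    # Illustration only: mirrors the binary-unit size arithmetic visible in the log.
    def to_bytes(size):
        """Convert strings like '5g', '5GiB', '10 GiB' to bytes, assuming binary units."""
        s = size.strip().lower().replace(" ", "")
        # Check longer suffixes first so '5gib' is not misread as '5g' + 'ib'.
        for suffix, factor in (("gib", 1024 ** 3), ("g", 1024 ** 3),
                               ("mib", 1024 ** 2), ("m", 1024 ** 2)):
            if s.endswith(suffix):
                return int(float(s[:-len(suffix)]) * factor)
        return int(s)

    # Values as reported by the run: requested "5g", actual 5368709120 bytes,
    # parent/pool device "10g" -> 10737418240 bytes.
    assert to_bytes("5g") == to_bytes("5GiB") == 5368709120
    assert to_bytes("10g") == 10737418240
    print("expected size matches actual size")

This sketch only restates the arithmetic; the expected/actual byte counts themselves come straight from the tasks recorded above, and the run's own "Assert expected size is actual size" step is what performs the authoritative comparison.
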
TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Sunday 06 July 2025 12:08:38 -0400 (0:00:00.641) 0:12:35.005 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Sunday 06 July 2025 12:08:39 -0400 (0:00:00.615) 0:12:35.620 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Sunday 06 July 2025 12:08:39 -0400 (0:00:00.368) 0:12:35.989 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Sunday 06 July 2025 12:08:39 -0400 (0:00:00.398) 0:12:36.388 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Sunday 06 July 2025 12:08:40 -0400 (0:00:00.388) 0:12:36.777 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Sunday 06 July 2025 12:08:40 -0400 (0:00:00.363) 0:12:37.141 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Sunday 06 July 2025 12:08:41 -0400 (0:00:00.397) 0:12:37.538 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Sunday 06 July 2025 12:08:41 -0400 (0:00:00.552) 0:12:38.154 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Sunday 06 July 2025 12:08:42 -0400 (0:00:00.329) 0:12:38.484 *********** skipping: [managed-node2] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Sunday 06 July 2025 12:08:42 -0400 
(0:00:00.407) 0:12:38.891 *********** skipping: [managed-node2] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Sunday 06 July 2025 12:08:43 -0400 (0:00:00.523) 0:12:39.415 *********** skipping: [managed-node2] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Sunday 06 July 2025 12:08:43 -0400 (0:00:00.522) 0:12:39.937 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Sunday 06 July 2025 12:08:43 -0400 (0:00:00.425) 0:12:40.363 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Sunday 06 July 2025 12:08:44 -0400 (0:00:00.443) 0:12:40.806 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Sunday 06 July 2025 12:08:44 -0400 (0:00:00.407) 0:12:41.214 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Sunday 06 July 2025 12:08:45 -0400 (0:00:00.461) 0:12:41.675 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Sunday 06 July 2025 12:08:45 -0400 (0:00:00.484) 0:12:42.160 *********** ok: [managed-node2] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Sunday 06 July 2025 12:08:46 -0400 (0:00:00.484) 0:12:42.645 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Sunday 06 July 2025 12:08:46 -0400 (0:00:00.466) 0:12:43.112 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Sunday 06 July 2025 12:08:47 -0400 (0:00:00.522) 0:12:43.634 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.018766", "end": "2025-07-06 12:08:48.573373", "rc": 0, "start": "2025-07-06 12:08:48.554607" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-a----- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Sunday 06 July 2025 12:08:49 -0400 (0:00:01.799) 0:12:45.433 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Sunday 06 July 2025 12:08:49 -0400 (0:00:00.543) 0:12:45.976 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Sunday 06 July 2025 12:08:50 -0400 (0:00:00.584) 0:12:46.561 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Sunday 06 July 2025 12:08:50 -0400 (0:00:00.502) 0:12:47.064 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Sunday 06 July 2025 12:08:51 -0400 (0:00:00.491) 0:12:47.555 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Sunday 06 July 2025 12:08:51 -0400 (0:00:00.409) 0:12:47.965 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Sunday 06 July 2025 12:08:52 -0400 (0:00:00.491) 0:12:48.456 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Sunday 06 July 2025 12:08:52 -0400 (0:00:00.533) 0:12:48.990 *********** TASK [Clean up variable namespace] 
********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Sunday 06 July 2025 12:08:53 -0400 (0:00:00.465) 0:12:49.456 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up] **************************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:116 Sunday 06 July 2025 12:08:53 -0400 (0:00:00.373) 0:12:49.829 *********** TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Sunday 06 July 2025 12:08:55 -0400 (0:00:01.686) 0:12:51.515 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Sunday 06 July 2025 12:08:55 -0400 (0:00:00.644) 0:12:52.160 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Sunday 06 July 2025 12:08:56 -0400 (0:00:00.557) 0:12:52.717 *********** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [managed-node2] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Sunday 06 July 2025 12:08:57 -0400 (0:00:00.945) 0:12:53.663 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Sunday 06 July 2025 12:08:57 -0400 (0:00:00.360) 0:12:54.023 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Sunday 06 July 2025 12:08:57 -0400 (0:00:00.377) 0:12:54.400 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Sunday 06 July 2025 12:08:58 -0400 (0:00:00.646) 0:12:55.046 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Sunday 06 July 2025 12:08:59 -0400 (0:00:00.417) 0:12:55.464 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Sunday 06 July 2025 12:09:00 -0400 (0:00:01.265) 0:12:56.729 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Sunday 06 July 2025 12:09:00 -0400 (0:00:00.407) 0:12:57.137 *********** ok: [managed-node2] => { "storage_pools | d([])": [ { "disks": [ "sda" ], "name": "foo", "state": "absent", "volumes": [ { "fs_type": "xfs", "mount_point": "/opt/test1", "name": "test1", "size": "5g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Sunday 06 July 2025 12:09:01 -0400 (0:00:00.452) 0:12:57.589 *********** ok: [managed-node2] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Sunday 06 July 2025 12:09:01 -0400 (0:00:00.556) 0:12:58.145 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Sunday 06 July 2025 12:09:02 -0400 (0:00:00.410) 0:12:58.556 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Sunday 06 July 2025 12:09:02 -0400 (0:00:00.431) 0:12:58.987 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Sunday 06 July 2025 12:09:03 -0400 (0:00:00.555) 0:12:59.542 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Sunday 06 July 2025 12:09:03 -0400 (0:00:00.517) 0:13:00.059 *********** ok: [managed-node2] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Sunday 06 July 2025 12:09:04 -0400 (0:00:00.662) 0:13:00.722 *********** TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Sunday 06 July 2025 12:09:04 -0400 (0:00:00.341) 0:13:01.063 *********** changed: [managed-node2] => { "actions": [ { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Sunday 06 July 2025 12:09:10 -0400 (0:00:06.074) 0:13:07.138 *********** 
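The blivet run above tears down everything the earlier plays built on sda: the test1 logical volume, the foo volume group, and the lvmpv signature, leaving the disk bare. Judging from the storage_pools value echoed by the role a few tasks earlier, the "Clean up" play presumably calls the storage role with the pool marked absent; a minimal sketch of such a call (play layout, task name, and include_role style are assumptions, pool and volume values are copied from the Show storage_pools debug output) would be:

    - hosts: managed-node2
      tasks:
        - name: Clean up the test pool (sketch)
          include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                disks:
                  - sda
                state: absent
                volumes:
                  - name: test1
                    size: 5g
                    fs_type: xfs
                    mount_point: /opt/test1

With state: absent the role computes the destroy actions shown above and leaves the member disk with no remaining format, which is what the later verification tasks expect to find.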
skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Sunday 06 July 2025 12:09:11 -0400 (0:00:00.438) 0:13:07.577 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751818014.7830455, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "72884e3f126482c2d28276ff7c57744fa95eff91", "ctime": 1751818013.2330453, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 264045, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751818013.2330453, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1229, "uid": 0, "version": "1023161980", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Sunday 06 July 2025 12:09:13 -0400 (0:00:02.050) 0:13:09.628 *********** ok: [managed-node2] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Sunday 06 July 2025 12:09:14 -0400 (0:00:01.619) 0:13:11.247 *********** TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Sunday 06 July 2025 12:09:15 -0400 (0:00:00.442) 0:13:11.690 *********** ok: [managed-node2] => { "blivet_output": { "actions": [ { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, 
"encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Sunday 06 July 2025 12:09:15 -0400 (0:00:00.662) 0:13:12.353 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Sunday 06 July 2025 12:09:16 -0400 (0:00:00.682) 0:13:13.035 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Sunday 06 July 2025 12:09:17 -0400 (0:00:00.430) 0:13:13.466 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Sunday 06 July 2025 12:09:17 -0400 (0:00:00.511) 0:13:13.977 *********** skipping: [managed-node2] => { "changed": 
false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Sunday 06 July 2025 12:09:18 -0400 (0:00:00.565) 0:13:14.543 *********** TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Sunday 06 July 2025 12:09:18 -0400 (0:00:00.438) 0:13:14.981 *********** TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Sunday 06 July 2025 12:09:18 -0400 (0:00:00.395) 0:13:15.377 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Sunday 06 July 2025 12:09:19 -0400 (0:00:00.421) 0:13:15.798 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751817219.282967, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Sunday 06 July 2025 12:09:21 -0400 (0:00:01.857) 0:13:17.656 *********** TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Sunday 06 July 2025 12:09:21 -0400 (0:00:00.294) 0:13:17.950 *********** ok: [managed-node2] TASK [Verify role results] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_fs.yml:130 Sunday 06 July 2025 12:09:23 -0400 (0:00:02.397) 0:13:20.348 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node2 TASK [Print out pool information] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Sunday 06 July 2025 12:09:24 -0400 (0:00:01.029) 0:13:21.377 *********** ok: [managed-node2] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, 
"encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "5g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Sunday 06 July 2025 12:09:26 -0400 (0:00:01.063) 0:13:22.441 *********** skipping: [managed-node2] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Sunday 06 July 2025 12:09:26 -0400 (0:00:00.552) 0:13:22.993 *********** ok: [managed-node2] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Sunday 06 July 2025 12:09:28 -0400 (0:00:01.768) 0:13:24.762 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002771", "end": "2025-07-06 12:09:29.791239", "rc": 0, "start": "2025-07-06 12:09:29.788468" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Sunday 06 July 2025 12:09:30 -0400 (0:00:01.943) 0:13:26.705 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003304", "end": "2025-07-06 12:09:31.879916", "failed_when_result": false, "rc": 0, "start": "2025-07-06 12:09:31.876612" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Sunday 06 July 2025 12:09:32 -0400 (0:00:01.993) 0:13:28.698 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node2 TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Sunday 06 July 2025 12:09:33 -0400 (0:00:00.774) 0:13:29.495 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Sunday 06 July 2025 12:09:33 -0400 (0:00:00.510) 0:13:30.006 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Sunday 06 July 2025 12:09:33 -0400 (0:00:00.307) 0:13:30.314 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Sunday 06 July 2025 12:09:34 -0400 (0:00:00.462) 0:13:30.777 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Sunday 06 July 2025 12:09:35 -0400 (0:00:01.041) 0:13:31.818 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Sunday 06 July 2025 12:09:35 -0400 (0:00:00.568) 0:13:32.387 *********** TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Sunday 06 July 2025 12:09:36 -0400 (0:00:00.477) 0:13:32.864 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [Set pool pvs] 
************************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Sunday 06 July 2025 12:09:36 -0400 (0:00:00.476) 0:13:33.341 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Sunday 06 July 2025 12:09:37 -0400 (0:00:00.550) 0:13:33.891 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Sunday 06 July 2025 12:09:38 -0400 (0:00:00.636) 0:13:34.527 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Sunday 06 July 2025 12:09:38 -0400 (0:00:00.342) 0:13:34.870 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Sunday 06 July 2025 12:09:38 -0400 (0:00:00.501) 0:13:35.371 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:54 Sunday 06 July 2025 12:09:39 -0400 (0:00:00.372) 0:13:35.743 *********** TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:67 Sunday 06 July 2025 12:09:39 -0400 (0:00:00.351) 0:13:36.095 *********** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "rc": 1 } STDERR: Shared connection to 10.31.15.253 closed. 
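The rc of 1 here is tolerated on purpose: the grow-to-fill capability probe treats a non-zero exit as "feature not available" rather than as a task failure, which is why failed_when_result is false even though the return code is reported as non-zero below. A sketch of that general pattern (the task name and the /tmp/storage_probe.py path are hypothetical; the real probe command is not shown in this log) looks like:

    - hosts: managed-node2
      tasks:
        - name: Probe whether the installed blivet supports grow-to-fill (illustrative)
          command: python /tmp/storage_probe.py   # hypothetical probe script
          register: blivet_grow_supported
          changed_when: false
          failed_when: false   # rc 1 only means the capability is absent

Registering the result while suppressing failure lets later tasks branch on whether the capability exists instead of aborting the whole test run.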
MSG: non-zero return code TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:77 Sunday 06 July 2025 12:09:41 -0400 (0:00:01.697) 0:13:37.793 *********** TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87 Sunday 06 July 2025 12:09:41 -0400 (0:00:00.359) 0:13:38.152 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node2 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Sunday 06 July 2025 12:09:42 -0400 (0:00:00.903) 0:13:39.056 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Sunday 06 July 2025 12:09:43 -0400 (0:00:00.401) 0:13:39.458 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Sunday 06 July 2025 12:09:43 -0400 (0:00:00.334) 0:13:39.792 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Sunday 06 July 2025 12:09:43 -0400 (0:00:00.464) 0:13:40.257 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Sunday 06 July 2025 12:09:44 -0400 (0:00:00.506) 0:13:40.764 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Sunday 06 July 2025 12:09:44 -0400 (0:00:00.525) 0:13:41.290 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Sunday 06 July 2025 12:09:45 -0400 (0:00:00.486) 0:13:41.776 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Sunday 06 July 2025 12:09:45 -0400 (0:00:00.449) 0:13:42.225 *********** skipping: 
[managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Sunday 06 July 2025 12:09:46 -0400 (0:00:00.501) 0:13:42.727 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Sunday 06 July 2025 12:09:46 -0400 (0:00:00.519) 0:13:43.246 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Sunday 06 July 2025 12:09:47 -0400 (0:00:00.484) 0:13:43.731 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90 Sunday 06 July 2025 12:09:47 -0400 (0:00:00.359) 0:13:44.091 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Sunday 06 July 2025 12:09:48 -0400 (0:00:00.902) 0:13:44.993 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8 Sunday 06 July 2025 12:09:49 -0400 (0:00:00.953) 0:13:45.947 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16 Sunday 06 July 2025 12:09:50 -0400 (0:00:00.510) 0:13:46.457 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20 Sunday 06 July 2025 12:09:50 -0400 (0:00:00.515) 0:13:46.973 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27 Sunday 06 July 2025 12:09:51 -0400 (0:00:00.545) 
0:13:47.518 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31 Sunday 06 July 2025 12:09:52 -0400 (0:00:00.937) 0:13:48.456 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37 Sunday 06 July 2025 12:09:52 -0400 (0:00:00.382) 0:13:48.839 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42 Sunday 06 July 2025 12:09:52 -0400 (0:00:00.483) 0:13:49.322 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93 Sunday 06 July 2025 12:09:53 -0400 (0:00:00.480) 0:13:49.803 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Sunday 06 July 2025 12:09:54 -0400 (0:00:01.006) 0:13:50.809 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 TASK [Get information about thinpool] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8 Sunday 06 July 2025 12:09:55 -0400 (0:00:00.856) 0:13:51.666 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16 Sunday 06 July 2025 12:09:55 -0400 (0:00:00.415) 0:13:52.082 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22 Sunday 06 July 2025 12:09:56 -0400 (0:00:00.479) 0:13:52.561 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26 Sunday 06 July 2025 12:09:56 -0400 (0:00:00.472) 0:13:53.033 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_thin_status": 
null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96 Sunday 06 July 2025 12:09:57 -0400 (0:00:00.521) 0:13:53.554 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2 TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Sunday 06 July 2025 12:09:58 -0400 (0:00:01.116) 0:13:54.672 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Sunday 06 July 2025 12:09:58 -0400 (0:00:00.560) 0:13:55.232 *********** TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Sunday 06 July 2025 12:09:59 -0400 (0:00:00.442) 0:13:55.675 *********** TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Sunday 06 July 2025 12:09:59 -0400 (0:00:00.381) 0:13:56.056 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99 Sunday 06 July 2025 12:09:59 -0400 (0:00:00.338) 0:13:56.395 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Sunday 06 July 2025 12:10:00 -0400 (0:00:00.955) 0:13:57.351 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 TASK [Get information about VDO deduplication] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8 Sunday 06 July 2025 12:10:01 -0400 (0:00:00.656) 0:13:58.007 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15 Sunday 06 July 2025 12:10:02 -0400 (0:00:00.482) 0:13:58.490 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task 
path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21 Sunday 06 July 2025 12:10:02 -0400 (0:00:00.511) 0:13:59.001 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27 Sunday 06 July 2025 12:10:03 -0400 (0:00:00.425) 0:13:59.427 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34 Sunday 06 July 2025 12:10:03 -0400 (0:00:00.513) 0:13:59.940 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40 Sunday 06 July 2025 12:10:03 -0400 (0:00:00.431) 0:14:00.372 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46 Sunday 06 July 2025 12:10:04 -0400 (0:00:00.464) 0:14:00.837 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102 Sunday 06 July 2025 12:10:04 -0400 (0:00:00.334) 0:14:01.172 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Sunday 06 July 2025 12:10:05 -0400 (0:00:00.935) 0:14:02.107 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print script output] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Sunday 06 July 2025 12:10:06 -0400 (0:00:00.526) 0:14:02.634 *********** skipping: [managed-node2] => {} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Sunday 06 July 2025 12:10:06 -0400 (0:00:00.462) 0:14:03.096 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Sunday 06 July 2025 12:10:07 -0400 (0:00:00.471) 0:14:03.568 *********** skipping: [managed-node2] => { "changed": 
false, "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Sunday 06 July 2025 12:10:07 -0400 (0:00:00.456) 0:14:04.024 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Sunday 06 July 2025 12:10:08 -0400 (0:00:00.472) 0:14:04.496 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Sunday 06 July 2025 12:10:08 -0400 (0:00:00.408) 0:14:04.905 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:105 Sunday 06 July 2025 12:10:08 -0400 (0:00:00.314) 0:14:05.219 *********** ok: [managed-node2] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Sunday 06 July 2025 12:10:09 -0400 (0:00:00.443) 0:14:05.663 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node2 TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Sunday 06 July 2025 12:10:10 -0400 (0:00:00.821) 0:14:06.485 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Sunday 06 July 2025 12:10:10 -0400 (0:00:00.482) 0:14:06.967 *********** included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 included: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 included: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Sunday 06 July 2025 12:10:12 -0400 (0:00:02.020) 0:14:08.987 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Sunday 06 July 2025 12:10:13 -0400 (0:00:00.433) 0:14:09.420 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Sunday 06 July 2025 12:10:14 -0400 (0:00:01.348) 0:14:10.769 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Sunday 06 July 2025 12:10:14 -0400 (0:00:00.436) 0:14:11.205 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Sunday 06 July 2025 12:10:15 -0400 (0:00:00.312) 0:14:11.518 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Sunday 06 July 2025 12:10:15 -0400 (0:00:00.359) 0:14:11.877 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Sunday 06 July 2025 12:10:15 -0400 (0:00:00.443) 0:14:12.320 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Sunday 06 July 2025 12:10:16 -0400 (0:00:00.325) 0:14:12.646 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] 
******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Sunday 06 July 2025 12:10:16 -0400 (0:00:00.434) 0:14:13.080 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Sunday 06 July 2025 12:10:17 -0400 (0:00:00.357) 0:14:13.438 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Sunday 06 July 2025 12:10:17 -0400 (0:00:00.404) 0:14:13.843 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Sunday 06 July 2025 12:10:17 -0400 (0:00:00.449) 0:14:14.292 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Sunday 06 July 2025 12:10:18 -0400 (0:00:00.684) 0:14:14.977 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Sunday 06 July 2025 12:10:18 -0400 (0:00:00.402) 0:14:15.380 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Sunday 06 July 2025 12:10:19 -0400 (0:00:00.500) 0:14:15.880 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Sunday 06 July 2025 12:10:19 -0400 (0:00:00.421) 0:14:16.302 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Sunday 
06 July 2025 12:10:20 -0400 (0:00:00.593) 0:14:16.896 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Sunday 06 July 2025 12:10:20 -0400 (0:00:00.395) 0:14:17.291 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Sunday 06 July 2025 12:10:21 -0400 (0:00:00.381) 0:14:17.673 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Sunday 06 July 2025 12:10:21 -0400 (0:00:00.413) 0:14:18.087 *********** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Sunday 06 July 2025 12:10:23 -0400 (0:00:02.066) 0:14:20.153 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Sunday 06 July 2025 12:10:24 -0400 (0:00:00.409) 0:14:20.563 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Sunday 06 July 2025 12:10:24 -0400 (0:00:00.676) 0:14:21.239 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Sunday 06 July 2025 12:10:25 -0400 (0:00:00.351) 0:14:21.591 *********** ok: [managed-node2] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Sunday 06 July 2025 12:10:25 -0400 (0:00:00.599) 0:14:22.190 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Sunday 06 July 2025 12:10:26 -0400 (0:00:00.458) 0:14:22.648 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Sunday 06 July 2025 12:10:26 -0400 (0:00:00.432) 0:14:23.081 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Sunday 06 July 2025 12:10:27 -0400 (0:00:00.493) 0:14:23.575 *********** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Sunday 06 July 2025 12:10:30 -0400 (0:00:02.949) 0:14:26.525 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Sunday 06 July 2025 12:10:30 -0400 (0:00:00.473) 0:14:26.998 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Sunday 06 July 2025 12:10:30 -0400 (0:00:00.400) 0:14:27.398 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Sunday 06 July 2025 12:10:31 -0400 (0:00:00.381) 0:14:27.780 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Sunday 06 July 2025 12:10:31 -0400 (0:00:00.486) 0:14:28.267 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Sunday 06 July 2025 12:10:32 -0400 (0:00:00.422) 0:14:28.689 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: 
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Sunday 06 July 2025 12:10:32 -0400 (0:00:00.359) 0:14:29.048 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Sunday 06 July 2025 12:10:33 -0400 (0:00:00.368) 0:14:29.417 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Sunday 06 July 2025 12:10:33 -0400 (0:00:00.185) 0:14:29.603 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Sunday 06 July 2025 12:10:33 -0400 (0:00:00.434) 0:14:30.038 *********** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Sunday 06 July 2025 12:10:34 -0400 (0:00:00.590) 0:14:30.628 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Sunday 06 July 2025 12:10:34 -0400 (0:00:00.469) 0:14:31.098 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Sunday 06 July 2025 12:10:35 -0400 (0:00:00.373) 0:14:31.471 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Sunday 06 July 2025 12:10:35 -0400 (0:00:00.543) 0:14:32.015 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Sunday 06 July 2025 12:10:36 -0400 (0:00:00.510) 0:14:32.525 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] 
************************************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Sunday 06 July 2025 12:10:36 -0400 (0:00:00.403) 0:14:32.929 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Sunday 06 July 2025 12:10:36 -0400 (0:00:00.420) 0:14:33.350 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Sunday 06 July 2025 12:10:37 -0400 (0:00:00.544) 0:14:33.894 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Sunday 06 July 2025 12:10:37 -0400 (0:00:00.395) 0:14:34.289 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Sunday 06 July 2025 12:10:38 -0400 (0:00:00.411) 0:14:34.701 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Sunday 06 July 2025 12:10:38 -0400 (0:00:00.605) 0:14:35.307 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Sunday 06 July 2025 12:10:39 -0400 (0:00:00.459) 0:14:35.766 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Sunday 06 July 2025 12:10:39 -0400 (0:00:00.542) 0:14:36.309 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Sunday 06 July 2025 12:10:40 -0400 (0:00:00.488) 0:14:36.798 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Sunday 06 July 2025 12:10:40 -0400 
(0:00:00.432) 0:14:37.231 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Sunday 06 July 2025 12:10:41 -0400 (0:00:00.492) 0:14:37.723 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Sunday 06 July 2025 12:10:41 -0400 (0:00:00.315) 0:14:38.038 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Sunday 06 July 2025 12:10:42 -0400 (0:00:00.542) 0:14:38.581 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Sunday 06 July 2025 12:10:42 -0400 (0:00:00.432) 0:14:39.013 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Sunday 06 July 2025 12:10:43 -0400 (0:00:00.394) 0:14:39.407 *********** skipping: [managed-node2] => {} TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Sunday 06 July 2025 12:10:43 -0400 (0:00:00.616) 0:14:40.024 *********** skipping: [managed-node2] => {} TASK [Show test pool size] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Sunday 06 July 2025 12:10:44 -0400 (0:00:00.425) 0:14:40.449 *********** skipping: [managed-node2] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Sunday 06 July 2025 12:10:44 -0400 (0:00:00.474) 0:14:40.924 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Sunday 06 July 2025 12:10:45 -0400 (0:00:00.479) 0:14:41.404 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Sunday 06 July 2025 12:10:45 -0400 (0:00:00.514) 0:14:41.918 
*********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Sunday 06 July 2025 12:10:46 -0400 (0:00:00.486) 0:14:42.404 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Sunday 06 July 2025 12:10:46 -0400 (0:00:00.393) 0:14:42.797 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Sunday 06 July 2025 12:10:46 -0400 (0:00:00.546) 0:14:43.343 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Sunday 06 July 2025 12:10:47 -0400 (0:00:00.563) 0:14:43.907 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Sunday 06 July 2025 12:10:47 -0400 (0:00:00.259) 0:14:44.166 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Sunday 06 July 2025 12:10:48 -0400 (0:00:00.466) 0:14:44.633 *********** skipping: [managed-node2] => {} TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Sunday 06 July 2025 12:10:48 -0400 (0:00:00.462) 0:14:45.095 *********** skipping: [managed-node2] => {} TASK [Show test volume size] *************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Sunday 06 July 2025 12:10:49 -0400 (0:00:00.533) 0:14:45.629 *********** skipping: [managed-node2] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Sunday 06 July 2025 12:10:49 -0400 (0:00:00.422) 0:14:46.052 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Sunday 06 July 2025 12:10:50 -0400 (0:00:00.833) 0:14:46.886 
*********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Sunday 06 July 2025 12:10:50 -0400 (0:00:00.289) 0:14:47.175 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Sunday 06 July 2025 12:10:51 -0400 (0:00:00.346) 0:14:47.522 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Sunday 06 July 2025 12:10:51 -0400 (0:00:00.413) 0:14:47.935 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Sunday 06 July 2025 12:10:52 -0400 (0:00:00.516) 0:14:48.452 *********** ok: [managed-node2] => { "storage_test_actual_size": { "changed": false, "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Sunday 06 July 2025 12:10:52 -0400 (0:00:00.559) 0:14:49.012 *********** ok: [managed-node2] => { "storage_test_expected_size": "5368709120" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Sunday 06 July 2025 12:10:53 -0400 (0:00:00.434) 0:14:49.446 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Sunday 06 July 2025 12:10:53 -0400 (0:00:00.304) 0:14:49.751 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Sunday 06 July 2025 12:10:53 -0400 (0:00:00.451) 0:14:50.202 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Sunday 06 July 2025 12:10:54 -0400 (0:00:00.399) 0:14:50.601 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV cache size] 
******************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Sunday 06 July 2025 12:10:54 -0400 (0:00:00.418) 0:14:51.020 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Sunday 06 July 2025 12:10:54 -0400 (0:00:00.361) 0:14:51.381 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Sunday 06 July 2025 12:10:55 -0400 (0:00:00.426) 0:14:51.808 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Sunday 06 July 2025 12:10:55 -0400 (0:00:00.474) 0:14:52.282 *********** skipping: [managed-node2] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Sunday 06 July 2025 12:10:56 -0400 (0:00:00.419) 0:14:52.702 *********** ok: [managed-node2] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Sunday 06 July 2025 12:10:56 -0400 (0:00:00.468) 0:14:53.170 *********** TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Sunday 06 July 2025 12:10:57 -0400 (0:00:00.436) 0:14:53.606 *********** ok: [managed-node2] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } META: ran handlers META: ran handlers

PLAY RECAP *********************************************************************
managed-node2 : ok=619 changed=8 unreachable=0 failed=0 skipped=687 rescued=0 ignored=0

SYSTEM ROLES ERRORS BEGIN v1
[]
SYSTEM ROLES ERRORS END v1

TASKS RECAP ********************************************************************
Sunday 06 July 2025 12:10:57 -0400 (0:00:00.353) 0:14:53.960 ***********
===============================================================================
fedora.linux_system_roles.storage : Make sure blivet is available ------ 10.69s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 6.56s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 6.29s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 6.07s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.98s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.83s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.66s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70
Ensure cryptsetup is present -------------------------------------------- 5.39s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab --- 5.05s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161
fedora.linux_system_roles.storage : Get service facts ------------------- 3.75s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52
Ensure test packages ---------------------------------------------------- 3.50s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present --- 3.47s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97
fedora.linux_system_roles.storage : Update facts ------------------------ 3.37s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224
Run test verify for storage_test_volume_subset -------------------------- 3.35s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
fedora.linux_system_roles.storage : Remove obsolete mounts -------------- 3.28s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150
Ensure cryptsetup is present -------------------------------------------- 3.21s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Ensure cryptsetup is present -------------------------------------------- 3.20s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
fedora.linux_system_roles.storage : Set up new/current mounts ----------- 3.16s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166
Find unused disks in the system ----------------------------------------- 3.15s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
fedora.linux_system_roles.storage : Update facts ------------------------ 3.12s
/tmp/collections-xuI/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224
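Note on the fstab checks above ("Set some variables for fstab checking" through "Clean up variables"): they follow a collect-then-assert pattern, where lines of /etc/fstab that match the volume are gathered into list facts (storage_test_fstab_id_matches, storage_test_fstab_mount_point_matches, ...) and their lengths are compared against the expected counts ("0" in this run). A minimal sketch of that pattern, assuming a hypothetical storage_test_fstab register holding the file contents and a hypothetical storage_test_volume_device variable (this is not the literal content of test-verify-volume-fstab.yml):

  - name: Collect fstab lines that reference the volume device (sketch)
    set_fact:
      storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines | select('search', storage_test_volume_device) | list }}"

  - name: Verify that the device identifier appears in /etc/fstab the expected number of times (sketch)
    assert:
      that:
        - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int
      msg: Unexpected number of /etc/fstab entries for the volume

The crypttab checks ("Set test variables" / "Check for /etc/crypttab entry") use the same shape, with _storage_test_crypttab_entries and _storage_test_expected_crypttab_entries.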
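Note on the size checks: the "storage_test_expected_size": "5368709120" shown by the "Show expected size" tasks is 5 GiB expressed in bytes (5 * 1024^3 = 5368709120). If a test were to derive that value instead of hard-coding it, a one-task sketch (hypothetical, not taken from test-verify-volume-size.yml) could be:

  - name: Set the expected volume size in bytes (sketch, 5 GiB)
    set_fact:
      storage_test_expected_size: "{{ 5 * 1024 * 1024 * 1024 }}"  # 5368709120

With Ansible 2.9's default (non-native) Jinja2 templating the computed value is stored as the string "5368709120", matching the quoted value in the output above.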
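Note on "Ensure cryptsetup is present": the result "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" is the usual no-op outcome of a package task when the package is already on the system, and the task appears three times in the duration summary, presumably because the verification include runs more than once in this test. A minimal sketch of such a task (an assumption about its shape, not the literal task at test-verify-volume-encryption.yml:10):

  - name: Ensure cryptsetup is present (sketch)
    package:
      name: cryptsetup
      state: present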