# STDOUT: ---v---v---v---v---v--- ansible-playbook [core 2.16.0] config file = /etc/ansible/ansible.cfg configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /opt/ansible-2.16/lib/python3.11/site-packages/ansible ansible collection location = /WORKDIR/git-weekly-ciy6hs_iyw/.collection executable location = /opt/ansible-2.16/bin/ansible-playbook python version = 3.11.5 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/opt/ansible-2.16/bin/python) jinja version = 3.1.2 libyaml = True Using /etc/ansible/ansible.cfg as config file Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_lvm_percent_size.yml ******************************************* 1 plays in /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml PLAY [Test specifying size as a percentage] ************************************ TASK [Gathering Facts] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:2 Thursday 27 June 2024 03:22:04 +0000 (0:00:00.011) 0:00:00.011 ********* ok: [sut] TASK [Run the role] ************************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:17 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.968) 0:00:00.980 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.016) 0:00:00.997 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.013) 0:00:01.010 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.018) 0:00:01.029 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, 
"ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.023) 0:00:01.052 ********* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.179) 0:00:01.232 ********* ok: [sut] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.016) 0:00:01.249 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 27 June 2024 03:22:05 +0000 (0:00:00.009) 0:00:01.258 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:22:06 +0000 (0:00:00.009) 0:00:01.267 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:22:06 +0000 (0:00:00.024) 0:00:01.292 ********* changed: [sut] => { "changed": true, "changes": { "installed": [ "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "Loaded plugins: fastestmirror\nLoading mirror speeds from cached hostfile\n * base: download.cf.centos.org\n * extras: download.cf.centos.org\n * updates: download.cf.centos.org\nResolving Dependencies\n--> Running transaction check\n---> Package libblockdev-crypto.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: libblockdev-utils(x86-64) = 2.18-5.el7 for package: libblockdev-crypto-2.18-5.el7.x86_64\n--> Processing 
Dependency: libvolume_key.so.1()(64bit) for package: libblockdev-crypto-2.18-5.el7.x86_64\n--> Processing Dependency: libbd_utils.so.2()(64bit) for package: libblockdev-crypto-2.18-5.el7.x86_64\n---> Package libblockdev-dm.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: libdmraid.so.1(Base)(64bit) for package: libblockdev-dm-2.18-5.el7.x86_64\n--> Processing Dependency: dmraid for package: libblockdev-dm-2.18-5.el7.x86_64\n--> Processing Dependency: libdmraid.so.1()(64bit) for package: libblockdev-dm-2.18-5.el7.x86_64\n---> Package libblockdev-lvm.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: lvm2 for package: libblockdev-lvm-2.18-5.el7.x86_64\n--> Processing Dependency: device-mapper-persistent-data for package: libblockdev-lvm-2.18-5.el7.x86_64\n---> Package libblockdev-mdraid.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: mdadm for package: libblockdev-mdraid-2.18-5.el7.x86_64\n--> Processing Dependency: libbytesize.so.1()(64bit) for package: libblockdev-mdraid-2.18-5.el7.x86_64\n---> Package libblockdev-swap.x86_64 0:2.18-5.el7 will be installed\n---> Package python2-blivet3.noarch 1:3.1.3-3.el7 will be installed\n--> Processing Dependency: blivet3-data = 1:3.1.3-3.el7 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: python2-bytesize >= 0.3 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: python2-blockdev >= 2.17 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: pyparted >= 3.9 for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Processing Dependency: lsof for package: 1:python2-blivet3-3.1.3-3.el7.noarch\n--> Running transaction check\n---> Package blivet3-data.noarch 1:3.1.3-3.el7 will be installed\n---> Package device-mapper-persistent-data.x86_64 0:0.8.5-3.el7_9.2 will be installed\n--> Processing Dependency: libaio.so.1(LIBAIO_0.4)(64bit) for package: device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64\n--> Processing Dependency: libaio.so.1(LIBAIO_0.1)(64bit) for package: device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64\n--> Processing Dependency: libaio.so.1()(64bit) for package: device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64\n---> Package dmraid.x86_64 0:1.0.0.rc16-28.el7 will be installed\n--> Processing Dependency: libdevmapper-event.so.1.02(Base)(64bit) for package: dmraid-1.0.0.rc16-28.el7.x86_64\n--> Processing Dependency: dmraid-events for package: dmraid-1.0.0.rc16-28.el7.x86_64\n--> Processing Dependency: libdevmapper-event.so.1.02()(64bit) for package: dmraid-1.0.0.rc16-28.el7.x86_64\n---> Package libblockdev-utils.x86_64 0:2.18-5.el7 will be installed\n---> Package libbytesize.x86_64 0:1.2-1.el7 will be installed\n---> Package lsof.x86_64 0:4.87-6.el7 will be installed\n---> Package lvm2.x86_64 7:2.02.187-6.el7_9.5 will be installed\n--> Processing Dependency: lvm2-libs = 7:2.02.187-6.el7_9.5 for package: 7:lvm2-2.02.187-6.el7_9.5.x86_64\n--> Processing Dependency: liblvm2app.so.2.2(Base)(64bit) for package: 7:lvm2-2.02.187-6.el7_9.5.x86_64\n--> Processing Dependency: liblvm2app.so.2.2()(64bit) for package: 7:lvm2-2.02.187-6.el7_9.5.x86_64\n---> Package mdadm.x86_64 0:4.1-9.el7_9 will be installed\n---> Package pyparted.x86_64 1:3.9-15.el7 will be installed\n---> Package python2-blockdev.x86_64 0:2.18-5.el7 will be installed\n--> Processing Dependency: libblockdev(x86-64) = 2.18-5.el7 for package: python2-blockdev-2.18-5.el7.x86_64\n---> Package python2-bytesize.x86_64 0:1.2-1.el7 will be installed\n---> Package 
volume_key-libs.x86_64 0:0.3.9-9.el7 will be installed\n--> Running transaction check\n---> Package device-mapper-event-libs.x86_64 7:1.02.170-6.el7_9.5 will be installed\n---> Package dmraid-events.x86_64 0:1.0.0.rc16-28.el7 will be installed\n--> Processing Dependency: sgpio for package: dmraid-events-1.0.0.rc16-28.el7.x86_64\n--> Processing Dependency: device-mapper-event for package: dmraid-events-1.0.0.rc16-28.el7.x86_64\n---> Package libaio.x86_64 0:0.3.109-13.el7 will be installed\n---> Package libblockdev.x86_64 0:2.18-5.el7 will be installed\n---> Package lvm2-libs.x86_64 7:2.02.187-6.el7_9.5 will be installed\n--> Running transaction check\n---> Package device-mapper-event.x86_64 7:1.02.170-6.el7_9.5 will be installed\n---> Package sgpio.x86_64 0:1.2.0.10-13.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository\n Size\n================================================================================\nInstalling:\n libblockdev-crypto x86_64 2.18-5.el7 base 60 k\n libblockdev-dm x86_64 2.18-5.el7 base 54 k\n libblockdev-lvm x86_64 2.18-5.el7 base 62 k\n libblockdev-mdraid x86_64 2.18-5.el7 base 57 k\n libblockdev-swap x86_64 2.18-5.el7 base 52 k\n python2-blivet3 noarch 1:3.1.3-3.el7 base 851 k\nInstalling for dependencies:\n blivet3-data noarch 1:3.1.3-3.el7 base 77 k\n device-mapper-event x86_64 7:1.02.170-6.el7_9.5 updates 192 k\n device-mapper-event-libs x86_64 7:1.02.170-6.el7_9.5 updates 192 k\n device-mapper-persistent-data x86_64 0.8.5-3.el7_9.2 updates 423 k\n dmraid x86_64 1.0.0.rc16-28.el7 base 151 k\n dmraid-events x86_64 1.0.0.rc16-28.el7 base 21 k\n libaio x86_64 0.3.109-13.el7 base 24 k\n libblockdev x86_64 2.18-5.el7 base 119 k\n libblockdev-utils x86_64 2.18-5.el7 base 58 k\n libbytesize x86_64 1.2-1.el7 base 52 k\n lsof x86_64 4.87-6.el7 base 331 k\n lvm2 x86_64 7:2.02.187-6.el7_9.5 updates 1.3 M\n lvm2-libs x86_64 7:2.02.187-6.el7_9.5 updates 1.1 M\n mdadm x86_64 4.1-9.el7_9 updates 439 k\n pyparted x86_64 1:3.9-15.el7 base 195 k\n python2-blockdev x86_64 2.18-5.el7 base 61 k\n python2-bytesize x86_64 1.2-1.el7 base 22 k\n sgpio x86_64 1.2.0.10-13.el7 base 13 k\n volume_key-libs x86_64 0.3.9-9.el7 base 141 k\n\nTransaction Summary\n================================================================================\nInstall 6 Packages (+19 Dependent packages)\n\nTotal download size: 6.0 M\nInstalled size: 16 M\nDownloading packages:\n--------------------------------------------------------------------------------\nTotal 4.3 MB/s | 6.0 MB 00:01 \nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : libblockdev-utils-2.18-5.el7.x86_64 1/25 \n Installing : 7:device-mapper-event-libs-1.02.170-6.el7_9.5.x86_64 2/25 \n Installing : libaio-0.3.109-13.el7.x86_64 3/25 \n Installing : device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64 4/25 \n Installing : 7:device-mapper-event-1.02.170-6.el7_9.5.x86_64 5/25 \n Installing : libbytesize-1.2-1.el7.x86_64 6/25 \n Installing : python2-bytesize-1.2-1.el7.x86_64 7/25 \n Installing : 7:lvm2-libs-2.02.187-6.el7_9.5.x86_64 8/25 \n Installing : 7:lvm2-2.02.187-6.el7_9.5.x86_64 9/25 \n Installing : libblockdev-2.18-5.el7.x86_64 10/25 \n Installing : python2-blockdev-2.18-5.el7.x86_64 11/25 \n Installing : 1:pyparted-3.9-15.el7.x86_64 12/25 \n Installing : sgpio-1.2.0.10-13.el7.x86_64 13/25 \n Installing : 
dmraid-1.0.0.rc16-28.el7.x86_64 14/25 \n Installing : dmraid-events-1.0.0.rc16-28.el7.x86_64 15/25 \n Installing : mdadm-4.1-9.el7_9.x86_64 16/25 \n Installing : volume_key-libs-0.3.9-9.el7.x86_64 17/25 \n Installing : 1:blivet3-data-3.1.3-3.el7.noarch 18/25 \n Installing : lsof-4.87-6.el7.x86_64 19/25 \n Installing : 1:python2-blivet3-3.1.3-3.el7.noarch 20/25 \n Installing : libblockdev-crypto-2.18-5.el7.x86_64 21/25 \n Installing : libblockdev-mdraid-2.18-5.el7.x86_64 22/25 \n Installing : libblockdev-dm-2.18-5.el7.x86_64 23/25 \n Installing : libblockdev-lvm-2.18-5.el7.x86_64 24/25 \n Installing : libblockdev-swap-2.18-5.el7.x86_64 25/25 \n Verifying : 7:device-mapper-event-1.02.170-6.el7_9.5.x86_64 1/25 \n Verifying : libblockdev-swap-2.18-5.el7.x86_64 2/25 \n Verifying : libblockdev-lvm-2.18-5.el7.x86_64 3/25 \n Verifying : lsof-4.87-6.el7.x86_64 4/25 \n Verifying : libblockdev-mdraid-2.18-5.el7.x86_64 5/25 \n Verifying : 1:blivet3-data-3.1.3-3.el7.noarch 6/25 \n Verifying : dmraid-events-1.0.0.rc16-28.el7.x86_64 7/25 \n Verifying : python2-blockdev-2.18-5.el7.x86_64 8/25 \n Verifying : libblockdev-dm-2.18-5.el7.x86_64 9/25 \n Verifying : libaio-0.3.109-13.el7.x86_64 10/25 \n Verifying : 7:lvm2-libs-2.02.187-6.el7_9.5.x86_64 11/25 \n Verifying : python2-bytesize-1.2-1.el7.x86_64 12/25 \n Verifying : libblockdev-2.18-5.el7.x86_64 13/25 \n Verifying : libbytesize-1.2-1.el7.x86_64 14/25 \n Verifying : 7:device-mapper-event-libs-1.02.170-6.el7_9.5.x86_64 15/25 \n Verifying : 7:lvm2-2.02.187-6.el7_9.5.x86_64 16/25 \n Verifying : libblockdev-utils-2.18-5.el7.x86_64 17/25 \n Verifying : volume_key-libs-0.3.9-9.el7.x86_64 18/25 \n Verifying : device-mapper-persistent-data-0.8.5-3.el7_9.2.x86_64 19/25 \n Verifying : 1:python2-blivet3-3.1.3-3.el7.noarch 20/25 \n Verifying : dmraid-1.0.0.rc16-28.el7.x86_64 21/25 \n Verifying : mdadm-4.1-9.el7_9.x86_64 22/25 \n Verifying : sgpio-1.2.0.10-13.el7.x86_64 23/25 \n Verifying : libblockdev-crypto-2.18-5.el7.x86_64 24/25 \n Verifying : 1:pyparted-3.9-15.el7.x86_64 25/25 \n\nInstalled:\n libblockdev-crypto.x86_64 0:2.18-5.el7 libblockdev-dm.x86_64 0:2.18-5.el7 \n libblockdev-lvm.x86_64 0:2.18-5.el7 libblockdev-mdraid.x86_64 0:2.18-5.el7\n libblockdev-swap.x86_64 0:2.18-5.el7 python2-blivet3.noarch 1:3.1.3-3.el7 \n\nDependency Installed:\n blivet3-data.noarch 1:3.1.3-3.el7 \n device-mapper-event.x86_64 7:1.02.170-6.el7_9.5 \n device-mapper-event-libs.x86_64 7:1.02.170-6.el7_9.5 \n device-mapper-persistent-data.x86_64 0:0.8.5-3.el7_9.2 \n dmraid.x86_64 0:1.0.0.rc16-28.el7 \n dmraid-events.x86_64 0:1.0.0.rc16-28.el7 \n libaio.x86_64 0:0.3.109-13.el7 \n libblockdev.x86_64 0:2.18-5.el7 \n libblockdev-utils.x86_64 0:2.18-5.el7 \n libbytesize.x86_64 0:1.2-1.el7 \n lsof.x86_64 0:4.87-6.el7 \n lvm2.x86_64 7:2.02.187-6.el7_9.5 \n lvm2-libs.x86_64 7:2.02.187-6.el7_9.5 \n mdadm.x86_64 0:4.1-9.el7_9 \n pyparted.x86_64 1:3.9-15.el7 \n python2-blockdev.x86_64 0:2.18-5.el7 \n python2-bytesize.x86_64 0:1.2-1.el7 \n sgpio.x86_64 0:1.2.0.10-13.el7 \n volume_key-libs.x86_64 0:0.3.9-9.el7 \n\nComplete!\n" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:22:16 +0000 (0:00:10.098) 0:00:11.391 ********* ok: [sut] => { "storage_pools": "VARIABLE IS NOT 
DEFINED!: 'storage_pools' is undefined. 'storage_pools' is undefined" } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:22:16 +0000 (0:00:00.011) 0:00:11.402 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:22:16 +0000 (0:00:00.010) 0:00:11.413 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:22:16 +0000 (0:00:00.499) 0:00:11.912 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:22:16 +0000 (0:00:00.019) 0:00:11.932 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:22:16 +0000 (0:00:00.015) 0:00:11.947 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:22:16 +0000 (0:00:00.015) 0:00:11.963 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:22:16 +0000 (0:00:00.014) 0:00:11.977 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: kpartx TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:22:17 +0000 (0:00:00.298) 0:00:12.276 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { 
"name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": 
"nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": 
"rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": 
"systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:22:17 +0000 (0:00:00.904) 0:00:13.180 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:22:17 +0000 (0:00:00.028) 0:00:13.209 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:22:17 +0000 (0:00:00.009) 0:00:13.218 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.274) 0:00:13.493 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.018) 0:00:13.511 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.008) 0:00:13.519 ********* ok: [sut] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.012) 0:00:13.532 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.011) 0:00:13.543 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.012) 0:00:13.556 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.017) 0:00:13.574 ********* skipping: [sut] => { "changed": false, "false_condition": "blivet_output['mounts']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.009) 0:00:13.583 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.017) 0:00:13.601 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.017) 0:00:13.618 ********* skipping: [sut] => { "changed": false, "false_condition": "blivet_output['mounts']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.009) 0:00:13.628 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719457592.7553205, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 
1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.133) 0:00:13.762 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Thursday 27 June 2024 03:22:18 +0000 (0:00:00.008) 0:00:13.770 ********* ok: [sut] TASK [Get unused disks] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:21 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.664) 0:00:14.435 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/get_unused_disk.yml for sut TASK [Ensure test packages] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/get_unused_disk.yml:2 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.016) 0:00:14.452 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "util-linux-2.23.2-65.el7_9.1.x86_64 providing util-linux is already installed" ] } lsrpackages: util-linux TASK [Find unused disks in the system] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/get_unused_disk.yml:11 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.299) 0:00:14.751 ********* ok: [sut] => { "changed": false, "disks": [ "sda" ] } TASK [Debug why there are no unused disks] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/get_unused_disk.yml:20 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.219) 0:00:14.971 ********* skipping: [sut] => { "changed": false, "false_condition": "'Unable to find unused disk' in unused_disks_return.disks", "skip_reason": "Conditional result was False" } TASK [Set unused_disks if necessary] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/get_unused_disk.yml:29 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.009) 0:00:14.981 ********* ok: [sut] => { "ansible_facts": { "unused_disks": [ "sda" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/get_unused_disk.yml:34 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.012) 0:00:14.994 ********* skipping: [sut] => { "changed": false, "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)", "skip_reason": "Conditional result was False" } TASK [Print unused disks] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/get_unused_disk.yml:39 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.017) 0:00:15.011 ********* ok: [sut] => { "unused_disks": [ "sda" ] } TASK [Test for correct handling of invalid percentage-based size specification.] 
*** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:27 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.012) 0:00:15.024 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-failed.yml for sut TASK [Store global variable value copy] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-failed.yml:4 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.015) 0:00:15.039 ********* ok: [sut] => { "ansible_facts": { "storage_pools_global": [], "storage_safe_mode_global": false, "storage_volumes_global": [] }, "changed": false } TASK [Verify role raises correct error] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-failed.yml:10 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.020) 0:00:15.059 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.017) 0:00:15.077 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.015) 0:00:15.092 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.019) 0:00:15.111 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.026) 0:00:15.137 ********* skipping: [sut] => { "changed": false, "false_condition": "not 
__storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.012) 0:00:15.150 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.013) 0:00:15.163 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.011) 0:00:15.174 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.011) 0:00:15.185 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:22:19 +0000 (0:00:00.026) 0:00:15.211 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:22:20 +0000 (0:00:00.492) 0:00:15.704 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "2x%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:22:20 +0000 (0:00:00.029) 0:00:15.734 ********* ok: [sut] => { "storage_volumes": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:22:20 +0000 (0:00:00.014) 0:00:15.748 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:22:24 +0000 (0:00:03.743) 0:00:19.491 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:22:24 +0000 (0:00:00.021) 0:00:19.513 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:22:24 +0000 (0:00:00.017) 0:00:19.530 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:22:24 +0000 (0:00:00.020) 0:00:19.550 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:22:24 +0000 (0:00:00.016) 0:00:19.567 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: kpartx lvm2 TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:22:24 +0000 (0:00:00.297) 0:00:19.865 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": 
"NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", 
"status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", 
"source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": 
"static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": 
"tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:22:25 +0000 (0:00:00.741) 0:00:20.606 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:22:25 +0000 (0:00:00.030) 0:00:20.636 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:22:25 +0000 (0:00:00.008) 0:00:20.645 ********* fatal: [sut]: FAILED! => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } MSG: invalid percentage '2x%' size specified in pool 'foo' TASK [fedora.linux_system_roles.storage : Failed message] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:91 Thursday 27 June 2024 03:22:29 +0000 (0:00:04.031) 0:00:24.676 ********* fatal: [sut]: FAILED! 
=> { "changed": false } MSG: {'failed': True, 'pools': [], 'leaves': [], 'changed': False, 'actions': [], 'crypts': [], 'volumes': [], 'invocation': {'module_args': {'packages_only': False, 'disklabel_type': None, 'diskvolume_mkfs_option_map': {'ext4': '-F', 'ext3': '-F', 'ext2': '-F'}, 'safe_mode': False, 'pools': [{'grow_to_fill': False, 'name': 'foo', 'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_key_size': None, 'disks': ['sda'], 'encryption_key': None, 'encryption_luks_version': None, 'raid_device_count': None, 'raid_spare_count': None, 'state': 'present', 'volumes': [{'fs_type': None, 'mount_options': None, 'size': '2x%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, 'encryption': None, 'raid_level': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': None, 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_user': None, 'raid_disks': [], 'cache_mode': None, 'name': 'test1', 'mount_group': None, 'type': None, 'cached': None, 'thin': False, 'cache_size': None, 'cache_devices': [], 'fs_create_options': None}], 'encryption_tang_url': None, 'shared': False, 'raid_level': None, 'encryption_clevis_pin': None, 'type': 'lvm', 'encryption_cipher': None, 'encryption_tang_thumbprint': None, 'raid_chunk_size': None}], 'volumes': [], 'pool_defaults': {'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_cipher': None, 'disks': [], 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_device_count': None, 'state': 'present', 'volumes': [], 'shared': False, 'raid_level': None, 'type': 'lvm', 'grow_to_fill': False, 'raid_spare_count': None, 'raid_chunk_size': None}, 'volume_defaults': {'raid_metadata_version': None, 'raid_level': None, 'fs_type': 'xfs', 'mount_options': 'defaults', 'size': 0, 'mount_point': '', 'compression': None, 'encryption_password': None, 'encryption': False, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'thin_pool_name': None, 'fs_overwrite_existing': True, 'encryption_key_size': None, 'encryption_cipher': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'cache_size': 0, 'raid_spare_count': None, 'cache_mode': None, 'cache_devices': [], 'deduplication': None, 'cached': False, 'type': 'lvm', 'disks': [], 'thin': None, 'mount_check': 0, 'mount_passno': 0, 'raid_chunk_size': None, 'thin_pool_size': None, 'fs_create_options': ''}, 'use_partitions': None}}, 'mounts': [], 'packages': [], 'msg': "invalid percentage '2x%' size specified in pool 'foo'", '_ansible_no_log': None} TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.015) 0:00:24.692 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that we failed in the role] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-failed.yml:23 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.008) 0:00:24.700 ********* ok: [sut] => { "changed": false } MSG: All assertions 
passed TASK [Verify the blivet output and error message are correct] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-failed.yml:28 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.011) 0:00:24.712 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify correct exception or error message] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-failed.yml:39 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.018) 0:00:24.730 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_failed_exception is defined", "skip_reason": "Conditional result was False" } TASK [Create two LVM logical volumes under volume group 'foo' using percentage sizes] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:44 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.011) 0:00:24.742 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.023) 0:00:24.765 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.016) 0:00:24.781 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.019) 0:00:24.801 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.026) 0:00:24.827 
********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.012) 0:00:24.839 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.011) 0:00:24.851 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.010) 0:00:24.862 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.011) 0:00:24.873 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:22:29 +0000 (0:00:00.025) 0:00:24.899 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:22:30 +0000 (0:00:00.493) 0:00:25.392 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "60%" }, { "fs_type": "ext4", "mount_point": "/opt/test2", "name": "test2", "size": "40%" } ] } ] } 
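
For context, the successful run that follows drives the role with percentage-based volume sizes. Below is a minimal sketch of the corresponding invocation, reconstructed only from the `storage_pools` value printed just above; the play header, the use of `storage_safe_mode`, and anything else not shown in the log are assumptions, and this is not the verbatim contents of tests_lvm_percent_size.yml.

```yaml
# Sketch reconstructed from the logged storage_pools value; play-level keys
# and storage_safe_mode are assumptions (the failed-run module args above
# show safe_mode: False), not a copy of the actual test playbook.
- hosts: all
  vars:
    storage_safe_mode: false
  roles:
    - role: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: ["sda"]
            volumes:
              - name: test1
                size: "60%"            # percentage of the pool, as logged
                mount_point: /opt/test1
              - name: test2
                size: "40%"
                fs_type: ext4
                mount_point: /opt/test2
```

As the earlier, intentionally failing run shows, a malformed value such as "2x%" is rejected with "invalid percentage '2x%' size specified in pool 'foo'", while well-formed values like "60%" and "40%" proceed through the package and service checks that follow.
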
TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:22:30 +0000 (0:00:00.014) 0:00:25.407 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:22:30 +0000 (0:00:00.013) 0:00:25.420 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "e2fsprogs", "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:22:33 +0000 (0:00:03.820) 0:00:29.240 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:22:33 +0000 (0:00:00.020) 0:00:29.261 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:22:34 +0000 (0:00:00.016) 0:00:29.278 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:22:34 +0000 (0:00:00.017) 0:00:29.295 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:22:34 +0000 (0:00:00.017) 0:00:29.313 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "e2fsprogs-1.42.9-19.el7.x86_64 providing e2fsprogs is already installed", "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: e2fsprogs kpartx lvm2 TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:22:34 +0000 (0:00:00.299) 0:00:29.612 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": 
"NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": 
"systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": 
"iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { 
"name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": 
"systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:22:35 +0000 (0:00:00.742) 0:00:30.354 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:22:35 +0000 (0:00:00.029) 0:00:30.384 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:22:35 +0000 (0:00:00.008) 0:00:30.393 ********* changed: [sut] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1", "/dev/mapper/foo-test2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, 
"name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Thursday 27 June 2024 03:22:40 +0000 (0:00:05.142) 0:00:35.535 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:22:40 +0000 (0:00:00.018) 0:00:35.554 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 27 June 2024 03:22:40 +0000 (0:00:00.008) 0:00:35.562 ********* ok: [sut] => { 
"blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test2", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" }, { "action": "create device", "device": "/dev/mapper/foo-test1", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/mapper/foo-test1", "/dev/mapper/foo-test2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", 
"raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Thursday 27 June 2024 03:22:40 +0000 (0:00:00.015) 0:00:35.578 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Thursday 27 June 2024 03:22:40 +0000 (0:00:00.014) 0:00:35.592 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Thursday 27 June 2024 03:22:40 +0000 (0:00:00.011) 0:00:35.604 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Thursday 27 June 2024 03:22:40 +0000 (0:00:00.022) 0:00:35.626 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Thursday 27 June 2024 03:22:40 +0000 (0:00:00.437) 0:00:36.063 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [sut] => (item={'src': '/dev/mapper/foo-test1', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'xfs', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test1', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test2" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Thursday 27 June 2024 03:22:41 +0000 (0:00:00.389) 0:00:36.452 ********* skipping: [sut] => (item={'src': '/dev/mapper/foo-test1', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'xfs', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test1', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != 
none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Thursday 27 June 2024 03:22:41 +0000 (0:00:00.026) 0:00:36.479 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Thursday 27 June 2024 03:22:41 +0000 (0:00:00.251) 0:00:36.730 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719457592.7553205, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Thursday 27 June 2024 03:22:41 +0000 (0:00:00.128) 0:00:36.859 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Thursday 27 June 2024 03:22:41 +0000 (0:00:00.010) 0:00:36.870 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:62 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.617) 0:00:37.487 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml for sut TASK [Print out pool information] 
********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:2 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.020) 0:00:37.508 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:7 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.060) 0:00:37.569 ********* skipping: [sut] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:15 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.019) 0:00:37.588 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "4G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:20 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.183) 0:00:37.771 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003180", "end": "2024-06-27 03:22:42.671613", "rc": 0, "start": "2024-06-27 03:22:42.668433" } STDOUT: # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 /dev/mapper/foo-test2 /opt/test2 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:25 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.179) 0:00:37.950 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003087", "end": "2024-06-27 03:22:42.793197", "failed_when_result": false, "rc": 0, "start": "2024-06-27 03:22:42.790110" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:34 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.121) 0:00:38.072 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml for sut => (item={'grow_to_fill': False, 'name': 'foo', 'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_key_size': None, 'disks': ['sda'], 'encryption_key': None, 'encryption_luks_version': None, 'raid_device_count': None, 'raid_spare_count': None, 'state': 'present', 'volumes': [{'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}, {'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}], 'encryption_tang_url': None, 'shared': False, 
'raid_level': None, 'encryption_clevis_pin': None, 'type': 'lvm', 'encryption_cipher': None, 'encryption_tang_thumbprint': None, 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:5 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.035) 0:00:38.107 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:18 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.012) 0:00:38.119 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.019351", "end": "2024-06-27 03:22:42.982623", "rc": 0, "start": "2024-06-27 03:22:42.963272" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:24 Thursday 27 June 2024 03:22:42 +0000 (0:00:00.143) 0:00:38.263 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:34 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.015) 0:00:38.279 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml for sut => (item=members) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml for sut => (item=volumes) TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:2 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.023) 0:00:38.303 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:8 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.027) 0:00:38.330 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:17 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.178) 0:00:38.508 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:22 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.021) 0:00:38.530 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:27 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.022) 0:00:38.553 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:36 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.024) 0:00:38.578 ********* ok: 
[sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:41 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.013) 0:00:38.592 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:46 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.021) 0:00:38.613 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:51 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.010) 0:00:38.623 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:64 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.016) 0:00:38.640 ********* ok: [sut] => { "changed": false, "rc": 0 } STDOUT: False STDERR: Shared connection to 10.31.8.226 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:73 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.188) 0:00:38.828 ********* skipping: [sut] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "false_condition": "grow_supported.stdout | trim == 'True'", "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:83 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.023) 0:00:38.852 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:8 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.021) 0:00:38.873 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:14 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.883 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:19 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.893 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** 
task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:24 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.010) 0:00:38.903 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:29 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.912 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:37 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.922 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:46 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.931 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:55 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.941 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:64 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.950 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:74 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.010) 0:00:38.961 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:83 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.009) 0:00:38.970 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:86 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.011) 0:00:38.982 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml:2 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.021) 0:00:39.003 ********* included: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.025) 0:00:39.028 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.013) 0:00:39.042 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.013) 0:00:39.055 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV 
stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.068 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.081 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.013) 0:00:39.094 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.107 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.120 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.133 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.145 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.158 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.013) 0:00:39.172 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK 
[Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.185 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.197 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:89 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.012) 0:00:39.210 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml:2 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.023) 0:00:39.233 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': 
False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:22:43 +0000 (0:00:00.022) 0:00:39.256 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.266 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.275 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.285 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.029) 0:00:39.314 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.325 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:39.335 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.345 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:92 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.356 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:5 Thursday 27 June 2024 03:22:44 +0000 
(0:00:00.024) 0:00:39.381 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:10 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.021) 0:00:39.402 ********* skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:17 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.414 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml for sut => (item=/dev/sda) TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:2 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.019) 0:00:39.433 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:6 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.025) 0:00:39.459 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:14 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.022) 0:00:39.482 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:23 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.018) 0:00:39.500 ********* skipping: [sut] => { "changed": false, "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:32 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.018) 0:00:39.519 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:41 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.018) 0:00:39.537 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:24 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.548 ********* ok: [sut] => { 
"ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:95 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.012) 0:00:39.561 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml:2 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.024) 0:00:39.586 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.025) 0:00:39.611 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:39.621 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.631 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.641 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.650 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.660 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:39.671 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.682 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.692 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.702 ********* skipping: [sut] => { "changed": false, "false_condition": 
"storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.711 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.721 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:39.731 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.741 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:98 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.753 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml for sut TASK [Run 'stratis report'] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:6 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.025) 0:00:39.779 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Get information about Stratis] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:11 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.789 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:15 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:39.799 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:25 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.809 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional 
result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:34 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.818 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:44 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:39.828 ********* ok: [sut] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:101 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.839 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml:3 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:39.851 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 
'mount_group': None, 'type': 'lvm', 'disks': [], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.024) 0:00:39.875 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.022) 0:00:39.897 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.051) 0:00:39.949 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.016) 0:00:39.966 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.022) 0:00:39.989 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:40.000 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.013) 0:00:40.013 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and 
storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:40.023 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:40.033 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.028) 0:00:40.062 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.011) 0:00:40.073 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:40.083 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.009) 0:00:40.093 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:40.103 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.035) 0:00:40.139 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK 
[Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.024) 0:00:40.164 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.021) 0:00:40.186 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.018) 0:00:40.204 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.010) 0:00:40.215 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.022) 0:00:40.237 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:22:44 +0000 (0:00:00.021) 0:00:40.259 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458560.1992779, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1719458560.1992779, "dev": 5, "device_type": 64769, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 57345, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458560.1992779, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.130) 0:00:40.389 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.014) 0:00:40.403 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", 
"skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.010) 0:00:40.413 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.012) 0:00:40.426 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.011) 0:00:40.438 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.009) 0:00:40.448 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.013) 0:00:40.462 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:22:45 +0000 (0:00:00.009) 0:00:40.471 ********* changed: [sut] => { "changed": true, "changes": { "installed": [ "cryptsetup" ] }, "rc": 0, "results": [ "Loaded plugins: fastestmirror\nLoading mirror speeds from cached hostfile\n * base: download.cf.centos.org\n * extras: download.cf.centos.org\n * updates: download.cf.centos.org\nResolving Dependencies\n--> Running transaction check\n---> Package cryptsetup.x86_64 0:2.0.3-6.el7 will be installed\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nInstalling:\n cryptsetup x86_64 2.0.3-6.el7 base 154 k\n\nTransaction Summary\n================================================================================\nInstall 1 Package\n\nTotal download size: 154 k\nInstalled size: 354 k\nDownloading packages:\nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Installing : cryptsetup-2.0.3-6.el7.x86_64 1/1 \n Verifying : cryptsetup-2.0.3-6.el7.x86_64 1/1 \n\nInstalled:\n cryptsetup.x86_64 0:2.0.3-6.el7 \n\nComplete!\n" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:22:46 +0000 (0:00:01.704) 0:00:42.176 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": 
"Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:22:46 +0000 (0:00:00.011) 0:00:42.188 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:22:46 +0000 (0:00:00.010) 0:00:42.198 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:22:46 +0000 (0:00:00.023) 0:00:42.222 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:22:46 +0000 (0:00:00.012) 0:00:42.235 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:22:46 +0000 (0:00:00.011) 0:00:42.246 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:22:46 +0000 (0:00:00.011) 0:00:42.258 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.011) 0:00:42.269 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:42.280 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.023) 0:00:42.304 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.024) 0:00:42.328 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.018) 0:00:42.346 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.018) 0:00:42.365 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.018) 0:00:42.383 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:42.394 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:42.404 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:42.415 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:42.424 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:42.434 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:42.444 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:42.454 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:42.463 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:42.474 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:42.484 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:42.494 ********* ok: [sut] => { "bytes": 6442450944, "changed": false, "lvm": "6g", "parted": "6GiB", "size": "6 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.232) 0:00:42.726 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.020) 0:00:42.747 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.020) 0:00:42.768 ********* ok: [sut] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined. 
'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.013) 0:00:42.781 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.139) 0:00:42.921 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:22:47 +0000 
(0:00:00.025) 0:00:42.946 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "4G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.024) 0:00:42.970 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.021) 0:00:42.992 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "6442450944.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.022) 0:00:43.015 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.011) 0:00:43.026 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:43.036 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.046 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.056 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.065 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.075 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:43.086 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.095 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.105 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.114 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.124 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 
Thursday 27 June 2024 03:22:47 +0000 (0:00:00.009) 0:00:43.133 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.029) 0:00:43.163 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.011) 0:00:43.174 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.010) 0:00:43.185 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 6442450944, "changed": false, "failed": false, "lvm": "6g", "parted": "6GiB", "size": "6 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.012) 0:00:43.197 ********* ok: [sut] => { "storage_test_expected_size": "6442450944.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.011) 0:00:43.208 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:22:47 +0000 (0:00:00.023) 0:00:43.232 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.023581", "end": "2024-06-27 03:22:48.102590", "rc": 0, "start": "2024-06-27 03:22:48.079009" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.151) 0:00:43.383 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.021) 0:00:43.405 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.021) 0:00:43.427 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": 
"Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.018) 0:00:43.445 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.018) 0:00:43.463 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.018) 0:00:43.482 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.019) 0:00:43.501 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.010) 0:00:43.512 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.021) 0:00:43.534 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.048) 0:00:43.582 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.016) 0:00:43.599 ********* ok: [sut] => { "ansible_facts": { 
"storage_test_mount_expected_mount_point": "/opt/test2", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.022) 0:00:43.621 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.010) 0:00:43.632 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.014) 0:00:43.646 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.010) 0:00:43.656 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.010) 0:00:43.667 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.009) 0:00:43.677 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.009) 0:00:43.686 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.009) 0:00:43.696 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.010) 0:00:43.706 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.010) 0:00:43.717 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test2 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.033) 0:00:43.750 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.021) 0:00:43.772 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.021) 0:00:43.793 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.018) 0:00:43.811 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.011) 0:00:43.823 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.022) 0:00:43.846 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.022) 0:00:43.868 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458559.7752738, "attr_flags": "", "attributes": [], "block_size": 4096, 
"blocks": 0, "charset": "binary", "ctime": 1719458559.7752738, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 56835, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458559.7752738, "nlink": 1, "path": "/dev/mapper/foo-test2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.140) 0:00:44.008 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.014) 0:00:44.023 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.010) 0:00:44.033 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.014) 0:00:44.047 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.011) 0:00:44.059 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.009) 0:00:44.069 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.012) 0:00:44.082 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:22:48 +0000 (0:00:00.009) 0:00:44.091 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.292) 0:00:44.384 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.012) 0:00:44.396 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.406 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.022) 0:00:44.429 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.439 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.449 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.458 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:44.469 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:44.480 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 
Thursday 27 June 2024 03:22:49 +0000 (0:00:00.023) 0:00:44.504 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.021) 0:00:44.525 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.018) 0:00:44.544 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.018) 0:00:44.562 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.021) 0:00:44.583 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.011) 0:00:44.594 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:44.604 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.614 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.624 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.633 ********* skipping: [sut] => { "changed": false, 
"false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.030) 0:00:44.664 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.011) 0:00:44.675 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:44.686 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:44.697 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:44.707 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:44.717 ********* ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.132) 0:00:44.849 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.021) 0:00:44.870 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.019) 0:00:44.890 ********* ok: [sut] => { "storage_test_expected_size": "6442450944.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 
27 June 2024 03:22:49 +0000 (0:00:00.014) 0:00:44.904 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.130) 0:00:45.034 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.027) 0:00:45.061 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": 
"/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "4G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.029) 0:00:45.091 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.023) 0:00:45.114 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "4294967296.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.023) 0:00:45.137 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:45.148 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:45.158 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", 
"skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:45.168 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:45.179 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:45.189 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:45.199 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.009) 0:00:45.209 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:45.219 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:45.230 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.012) 0:00:45.242 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:45.252 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:22:49 +0000 (0:00:00.010) 0:00:45.262 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the 
expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.009) 0:00:45.272 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.009) 0:00:45.281 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.009) 0:00:45.291 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.013) 0:00:45.304 ********* ok: [sut] => { "storage_test_expected_size": "4294967296.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.011) 0:00:45.316 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.023) 0:00:45.340 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test2" ], "delta": "0:00:00.018628", "end": "2024-06-27 03:22:50.199383", "rc": 0, "start": "2024-06-27 03:22:50.180755" } STDOUT: LVM2_LV_NAME=test2 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.139) 0:00:45.480 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.022) 0:00:45.502 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.022) 0:00:45.524 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:22:50 +0000 
(0:00:00.020) 0:00:45.545 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.019) 0:00:45.565 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.020) 0:00:45.585 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.020) 0:00:45.605 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:44 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.011) 0:00:45.617 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:54 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.009) 0:00:45.626 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Repeat the previous invocation to verify idempotence] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:65 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.012) 0:00:45.639 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.028) 0:00:45.668 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.017) 0:00:45.686 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.023) 0:00:45.709 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional 
result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.029) 0:00:45.739 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.015) 0:00:45.754 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.013) 0:00:45.768 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.012) 0:00:45.780 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.012) 0:00:45.793 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:22:50 +0000 (0:00:00.028) 0:00:45.821 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ 
"python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:22:51 +0000 (0:00:00.483) 0:00:46.304 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "60%" }, { "mount_point": "/opt/test2", "name": "test2", "size": "40%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:22:51 +0000 (0:00:00.015) 0:00:46.320 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:22:51 +0000 (0:00:00.013) 0:00:46.333 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:22:55 +0000 (0:00:04.313) 0:00:50.647 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:22:55 +0000 (0:00:00.022) 0:00:50.669 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:22:55 +0000 (0:00:00.017) 0:00:50.687 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:22:55 +0000 (0:00:00.018) 0:00:50.706 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:22:55 +0000 (0:00:00.017) 0:00:50.723 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: kpartx lvm2 TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:22:55 +0000 (0:00:00.353) 0:00:51.077 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": 
"container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": 
"mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, 
"plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": 
"systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:22:56 +0000 (0:00:00.788) 0:00:51.866 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:22:56 +0000 (0:00:00.031) 0:00:51.897 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:22:56 +0000 (0:00:00.011) 0:00:51.908 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [ 
"/dev/mapper/foo-test2", "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 
Thursday 27 June 2024 03:23:01 +0000 (0:00:04.646) 0:00:56.554 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:23:01 +0000 (0:00:00.020) 0:00:56.575 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 27 June 2024 03:23:01 +0000 (0:00:00.010) 0:00:56.585 ********* ok: [sut] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test2", "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, 
"encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Thursday 27 June 2024 03:23:01 +0000 (0:00:00.017) 0:00:56.603 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, 
"raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Thursday 27 June 2024 03:23:01 +0000 (0:00:00.015) 0:00:56.619 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Thursday 27 June 2024 03:23:01 +0000 (0:00:00.014) 0:00:56.633 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Thursday 27 June 2024 03:23:01 +0000 (0:00:00.020) 0:00:56.654 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Thursday 27 June 2024 03:23:01 +0000 (0:00:00.272) 0:00:56.926 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [sut] => (item={'src': '/dev/mapper/foo-test1', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'xfs', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test1', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test2" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Thursday 27 June 2024 03:23:01 
+0000 (0:00:00.320) 0:00:57.247 ********* skipping: [sut] => (item={'src': '/dev/mapper/foo-test1', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'xfs', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test1', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Thursday 27 June 2024 03:23:02 +0000 (0:00:00.028) 0:00:57.275 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Thursday 27 June 2024 03:23:02 +0000 (0:00:00.448) 0:00:57.724 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719457592.7553205, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Thursday 27 June 2024 03:23:02 +0000 (0:00:00.136) 0:00:57.860 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Thursday 27 June 2024 03:23:02 +0000 
(0:00:00.010) 0:00:57.871 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:80 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.628) 0:00:58.499 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:2 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.023) 0:00:58.522 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:7 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.023) 0:00:58.546 
********* skipping: [sut] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:15 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.019) 0:00:58.565 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "4G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:20 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.134) 0:00:58.699 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002981", "end": "2024-06-27 03:23:03.542014", "rc": 0, "start": "2024-06-27 03:23:03.539033" } STDOUT: # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 /dev/mapper/foo-test2 /opt/test2 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:25 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.122) 0:00:58.822 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003001", "end": "2024-06-27 03:23:03.670049", "failed_when_result": false, "rc": 0, "start": "2024-06-27 03:23:03.667048" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:34 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.128) 0:00:58.951 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml for sut => (item={'grow_to_fill': False, 'name': 'foo', 'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_key_size': None, 'disks': ['sda'], 'encryption_key': None, 'encryption_luks_version': None, 'raid_device_count': None, 'raid_spare_count': None, 'state': 'present', 'volumes': [{'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}, {'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': 
False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}], 'encryption_tang_url': None, 'shared': False, 'raid_level': None, 'encryption_clevis_pin': None, 'type': 'lvm', 'encryption_cipher': None, 'encryption_tang_thumbprint': None, 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:5 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.033) 0:00:58.984 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:18 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.011) 0:00:58.996 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.020346", "end": "2024-06-27 03:23:03.860515", "rc": 0, "start": "2024-06-27 03:23:03.840169" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:24 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.145) 0:00:59.141 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:34 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.016) 0:00:59.158 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml for sut => (item=members) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml for sut => (item=volumes) TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:2 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.022) 0:00:59.180 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:8 Thursday 27 June 2024 03:23:03 +0000 (0:00:00.026) 0:00:59.207 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:17 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.122) 0:00:59.330 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:22 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.023) 0:00:59.353 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:27 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.023) 0:00:59.376 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] 
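The "Get VG shared value status" step above shells out to LVM (vgs --noheadings --binary -o shared foo) and gets back 0, i.e. the VG was not created as a shared/lvmlockd group, which the next task asserts against the pool's shared: false setting. A standalone version of the same check might look like this (sketch; the assertion is paraphrased, the real one lives in test-verify-pool.yml, and storage_test_pool stands for the pool item being verified):

    - name: Get VG shared value status
      ansible.builtin.command: vgs --noheadings --binary -o shared foo
      register: vgs_shared
      changed_when: false

    - name: Verify that VG shared value checks out
      ansible.builtin.assert:
        that:
          - (vgs_shared.stdout | trim == '1') == (storage_test_pool.shared | bool)
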
**************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:36 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.021) 0:00:59.398 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:41 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.013) 0:00:59.411 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:46 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.022) 0:00:59.434 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:51 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.011) 0:00:59.446 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:64 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.018) 0:00:59.465 ********* ok: [sut] => { "changed": false, "rc": 0 } STDOUT: False STDERR: Shared connection to 10.31.8.226 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:73 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.184) 0:00:59.649 ********* skipping: [sut] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "false_condition": "grow_supported.stdout | trim == 'True'", "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:83 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.021) 0:00:59.671 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:8 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.021) 0:00:59.692 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:14 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.010) 0:00:59.703 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:19 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.010) 0:00:59.713 ********* skipping: [sut] => 
{ "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:24 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:00:59.723 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:29 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:00:59.733 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:37 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:00:59.743 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:46 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:00:59.753 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:55 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:00:59.762 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:64 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.010) 0:00:59.773 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:74 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:00:59.783 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:83 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:00:59.793 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:86 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.012) 0:00:59.805 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] 
********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml:2 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.024) 0:00:59.829 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.078) 0:00:59.907 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:00:59.922 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:23:04 +0000 
(0:00:00.013) 0:00:59.936 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.013) 0:00:59.949 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:00:59.963 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:00:59.978 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:00:59.992 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:01:00.007 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:01:00.022 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:01:00.036 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.015) 0:01:00.052 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 
03:23:04 +0000 (0:00:00.014) 0:01:00.067 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.013) 0:01:00.080 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.014) 0:01:00.095 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:89 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.013) 0:01:00.109 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml:2 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.025) 0:01:00.134 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': 
'/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.024) 0:01:00.159 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.011) 0:01:00.171 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.011) 0:01:00.182 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.011) 0:01:00.193 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.012) 0:01:00.206 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.010) 0:01:00.216 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.010) 0:01:00.227 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.009) 0:01:00.236 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:92 Thursday 27 June 2024 03:23:04 +0000 (0:00:00.012) 0:01:00.249 ********* included: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:5 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.023) 0:01:00.272 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:10 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.020) 0:01:00.293 ********* skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:17 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.305 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml for sut => (item=/dev/sda) TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:2 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.018) 0:01:00.324 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:6 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.023) 0:01:00.347 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:14 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.023) 0:01:00.370 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:23 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.019) 0:01:00.389 ********* skipping: [sut] => { "changed": false, "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:32 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.018) 0:01:00.408 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:41 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.018) 0:01:00.426 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, 
"changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:24 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.012) 0:01:00.439 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:95 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.450 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml:2 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.024) 0:01:00.475 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.025) 0:01:00.500 ********* skipping: [sut] => 
{ "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.511 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.521 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.009) 0:01:00.531 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.009) 0:01:00.541 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.009) 0:01:00.551 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.562 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.573 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.584 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if 
VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.594 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.009) 0:01:00.604 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.009) 0:01:00.614 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.625 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.635 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:98 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.010) 0:01:00.646 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml for sut TASK [Run 'stratis report'] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:6 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.025) 0:01:00.672 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Get information about Stratis] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:11 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.683 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:15 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.034) 0:01:00.717 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] 
********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:25 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.012) 0:01:00.730 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:34 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.741 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:44 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.752 ********* ok: [sut] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:101 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.012) 0:01:00.765 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml:3 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.012) 0:01:00.777 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '40%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 
'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.026) 0:01:00.804 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.022) 0:01:00.827 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.048) 0:01:00.875 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.018) 0:01:00.894 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.025) 0:01:00.919 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.930 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] 
********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.014) 0:01:00.945 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.956 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.967 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:00.978 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.012) 0:01:00.991 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:01.002 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.012) 0:01:01.014 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.012) 0:01:01.027 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] 
}, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.034) 0:01:01.061 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.022) 0:01:01.084 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.024) 0:01:01.108 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.018) 0:01:01.127 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.011) 0:01:01.138 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.023) 0:01:01.161 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:23:05 +0000 (0:00:00.022) 0:01:01.184 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458560.1992779, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1719458560.1992779, "dev": 5, "device_type": 64769, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 57345, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458560.1992779, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.134) 0:01:01.319 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] 
************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.016) 0:01:01.335 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.346 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.013) 0:01:01.360 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.372 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.009) 0:01:01.382 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.013) 0:01:01.395 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.406 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.297) 0:01:01.703 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.714 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.010) 0:01:01.724 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the 
LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.022) 0:01:01.747 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.009) 0:01:01.757 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.768 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.779 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.790 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.010) 0:01:01.801 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.023) 0:01:01.825 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.021) 0:01:01.846 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.019) 0:01:01.866 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.018) 0:01:01.884 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.018) 0:01:01.903 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.914 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.010) 0:01:01.924 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.009) 0:01:01.934 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:01.945 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.010) 0:01:01.955 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.010) 0:01:01.965 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.009) 0:01:01.975 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:23:06 
+0000 (0:00:00.009) 0:01:01.985 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.009) 0:01:01.995 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.011) 0:01:02.006 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.034) 0:01:02.040 ********* ok: [sut] => { "bytes": 6442450944, "changed": false, "lvm": "6g", "parted": "6GiB", "size": "6 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.135) 0:01:02.176 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.020) 0:01:02.196 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.019) 0:01:02.216 ********* ok: [sut] => { "storage_test_expected_size": "4294967296.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:23:06 +0000 (0:00:00.012) 0:01:02.228 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.126) 0:01:02.354 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": 
"/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.029) 0:01:02.383 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "4G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", 
"name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.025) 0:01:02.409 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.023) 0:01:02.432 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "6442450944.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.023) 0:01:02.456 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.466 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.476 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.011) 0:01:02.488 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.498 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:02.508 ********* skipping: [sut] => { 
"changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.518 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:02.527 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:02.537 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.548 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:02.558 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.568 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.578 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.589 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:02.598 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.609 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 6442450944, "changed": false, 
"failed": false, "lvm": "6g", "parted": "6GiB", "size": "6 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.012) 0:01:02.622 ********* ok: [sut] => { "storage_test_expected_size": "6442450944.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.011) 0:01:02.634 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.023) 0:01:02.657 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.020938", "end": "2024-06-27 03:23:07.521508", "rc": 0, "start": "2024-06-27 03:23:07.500570" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.144) 0:01:02.801 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.021) 0:01:02.823 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.024) 0:01:02.847 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.019) 0:01:02.866 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.018) 0:01:02.885 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.018) 0:01:02.904 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.018) 0:01:02.922 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:02.933 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.024) 0:01:02.958 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.047) 0:01:03.006 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.016) 0:01:03.023 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test2", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.022) 0:01:03.045 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:03.056 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.013) 0:01:03.069 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and 
storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.011) 0:01:03.081 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:03.091 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:03.101 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.010) 0:01:03.111 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:03.121 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.009) 0:01:03.130 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.012) 0:01:03.142 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test2 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.033) 0:01:03.176 ********* ok: [sut] => { "changed": false } 
MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.022) 0:01:03.198 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.021) 0:01:03.220 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.018) 0:01:03.238 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:23:07 +0000 (0:00:00.011) 0:01:03.250 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.023) 0:01:03.274 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.022) 0:01:03.296 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458581.225476, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1719458581.225476, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 56835, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458581.225476, "nlink": 1, "path": "/dev/mapper/foo-test2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.130) 0:01:03.427 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.014) 0:01:03.441 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or 
storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:03.452 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.013) 0:01:03.465 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.013) 0:01:03.478 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.033) 0:01:03.511 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.014) 0:01:03.526 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:03.537 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.294) 0:01:03.832 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.011) 0:01:03.843 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:03.853 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.026) 0:01:03.880 ********* skipping: [sut] => { "changed": false, "false_condition": 
"_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.011) 0:01:03.891 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.011) 0:01:03.902 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.011) 0:01:03.913 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.011) 0:01:03.924 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:03.935 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.027) 0:01:03.962 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.021) 0:01:03.984 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.018) 0:01:04.003 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.018) 0:01:04.021 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", 
"skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.018) 0:01:04.040 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.011) 0:01:04.051 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.011) 0:01:04.062 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:04.073 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:04.083 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.009) 0:01:04.093 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:04.103 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.009) 0:01:04.113 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:04.124 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] 
********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:04.134 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:04.144 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:23:08 +0000 (0:00:00.010) 0:01:04.155 ********* ok: [sut] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.128) 0:01:04.283 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.020) 0:01:04.304 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.023) 0:01:04.327 ********* ok: [sut] => { "storage_test_expected_size": "6442450944.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.012) 0:01:04.340 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.128) 0:01:04.468 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ 
"sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "40%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.025) 0:01:04.494 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "4G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", 
"mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.024) 0:01:04.518 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.023) 0:01:04.541 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "4294967296.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.026) 0:01:04.568 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:04.579 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:04.590 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:04.600 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.009) 0:01:04.610 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.009) 0:01:04.619 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.011) 0:01:04.631 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.009) 0:01:04.641 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.009) 0:01:04.651 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.009) 0:01:04.660 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.009) 0:01:04.670 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:04.680 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.011) 0:01:04.692 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:04.703 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:04.713 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:04.723 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size] ****************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.012) 0:01:04.736 ********* ok: [sut] => { "storage_test_expected_size": "4294967296.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.011) 0:01:04.748 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.027) 0:01:04.775 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test2" ], "delta": "0:00:00.021228", "end": "2024-06-27 03:23:09.638553", "rc": 0, "start": "2024-06-27 03:23:09.617325" } STDOUT: LVM2_LV_NAME=test2 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.143) 0:01:04.918 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.022) 0:01:04.941 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.022) 0:01:04.963 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.018) 0:01:04.982 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.018) 0:01:05.000 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.022) 0:01:05.022 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.018) 0:01:05.041 ********* ok: [sut] => { "ansible_facts": { 
"_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:44 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.011) 0:01:05.053 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:54 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.009) 0:01:05.062 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Shrink test2 volume via percentage-based size spec] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:83 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.010) 0:01:05.073 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.031) 0:01:05.105 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.016) 0:01:05.121 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.021) 0:01:05.142 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 
Thursday 27 June 2024 03:23:09 +0000 (0:00:00.029) 0:01:05.172 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.040) 0:01:05.212 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.015) 0:01:05.228 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.012) 0:01:05.240 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:23:09 +0000 (0:00:00.012) 0:01:05.253 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:23:10 +0000 (0:00:00.027) 0:01:05.280 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:23:10 +0000 (0:00:00.493) 0:01:05.773 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "60%" }, { "mount_point": 
"/opt/test2", "name": "test2", "size": "25%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:23:10 +0000 (0:00:00.016) 0:01:05.789 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:23:10 +0000 (0:00:00.013) 0:01:05.803 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:23:14 +0000 (0:00:04.195) 0:01:09.998 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:23:14 +0000 (0:00:00.024) 0:01:10.023 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:23:14 +0000 (0:00:00.019) 0:01:10.042 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:23:14 +0000 (0:00:00.019) 0:01:10.061 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:23:14 +0000 (0:00:00.018) 0:01:10.079 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: kpartx lvm2 TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:23:15 +0000 (0:00:00.305) 0:01:10.385 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": 
"inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, 
"dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": 
"systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:23:15 +0000 (0:00:00.772) 0:01:11.157 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:23:15 +0000 (0:00:00.031) 0:01:11.188 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:23:15 +0000 (0:00:00.009) 0:01:11.198 ********* changed: [sut] => { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" }, { "action": "resize device", "device": "/dev/mapper/foo-test2", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test2", "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": 
"/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Thursday 27 June 2024 03:23:20 +0000 (0:00:05.052) 0:01:16.251 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.022) 0:01:16.273 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.010) 0:01:16.284 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "resize format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" }, { "action": "resize device", "device": "/dev/mapper/foo-test2", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ 
"/dev/mapper/foo-test2", "/dev/mapper/foo-test1", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "xfsprogs", "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.016) 0:01:16.300 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.015) 0:01:16.316 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.013) 0:01:16.329 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.020) 0:01:16.350 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.272) 0:01:16.623 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [sut] => (item={'src': '/dev/mapper/foo-test1', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'xfs', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test1', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test2" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.286) 0:01:16.909 ********* skipping: [sut] => (item={'src': '/dev/mapper/foo-test1', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'xfs', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test1', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 
'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.027) 0:01:16.937 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Thursday 27 June 2024 03:23:21 +0000 (0:00:00.250) 0:01:17.188 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719457592.7553205, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Thursday 27 June 2024 03:23:22 +0000 (0:00:00.128) 0:01:17.317 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Thursday 27 June 2024 03:23:22 +0000 (0:00:00.010) 0:01:17.327 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:98 Thursday 27 June 2024 03:23:22 +0000 (0:00:00.619) 0:01:17.947 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:2 Thursday 27 June 2024 03:23:22 +0000 (0:00:00.024) 0:01:17.971 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, 
"encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:7 Thursday 27 June 2024 03:23:22 +0000 (0:00:00.025) 0:01:17.997 ********* skipping: [sut] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:15 Thursday 27 June 2024 03:23:22 +0000 (0:00:00.018) 0:01:18.016 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "2.5G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:20 Thursday 27 June 2024 03:23:22 +0000 (0:00:00.130) 0:01:18.146 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003019", "end": "2024-06-27 03:23:22.988925", "rc": 0, "start": "2024-06-27 03:23:22.985906" } STDOUT: # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0 /dev/mapper/foo-test2 /opt/test2 ext4 defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:25 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.122) 0:01:18.268 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003023", "end": "2024-06-27 03:23:23.111710", "failed_when_result": false, "rc": 0, "start": "2024-06-27 03:23:23.108687" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:34 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.123) 0:01:18.392 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml for sut => (item={'grow_to_fill': False, 'name': 'foo', 'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_key_size': None, 'disks': ['sda'], 'encryption_key': None, 'encryption_luks_version': None, 'raid_device_count': None, 'raid_spare_count': None, 'state': 'present', 'volumes': [{'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}, {'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}], 'encryption_tang_url': None, 'shared': 
False, 'raid_level': None, 'encryption_clevis_pin': None, 'type': 'lvm', 'encryption_cipher': None, 'encryption_tang_thumbprint': None, 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:5 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.035) 0:01:18.427 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:18 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.011) 0:01:18.439 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.019218", "end": "2024-06-27 03:23:23.300498", "rc": 0, "start": "2024-06-27 03:23:23.281280" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:24 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.142) 0:01:18.581 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:34 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.015) 0:01:18.597 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml for sut => (item=members) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml for sut => (item=volumes) TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:2 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.023) 0:01:18.620 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:8 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.026) 0:01:18.647 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:17 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.119) 0:01:18.767 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:22 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.021) 0:01:18.788 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:27 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.022) 0:01:18.811 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:36 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.022) 0:01:18.834 
********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:41 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.013) 0:01:18.847 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:46 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.021) 0:01:18.868 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:51 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.011) 0:01:18.879 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:64 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.015) 0:01:18.895 ********* ok: [sut] => { "changed": false, "rc": 0 } STDOUT: False STDERR: Shared connection to 10.31.8.226 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:73 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.170) 0:01:19.065 ********* skipping: [sut] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "false_condition": "grow_supported.stdout | trim == 'True'", "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:83 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.022) 0:01:19.088 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:8 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.048) 0:01:19.136 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:14 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.011) 0:01:19.148 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:19 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.010) 0:01:19.158 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] 
**************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:24 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.010) 0:01:19.169 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:29 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.010) 0:01:19.179 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:37 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.010) 0:01:19.190 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:46 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.010) 0:01:19.200 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:55 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.009) 0:01:19.210 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:64 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.009) 0:01:19.220 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:74 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.009) 0:01:19.229 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:83 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.011) 0:01:19.240 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:86 Thursday 27 June 2024 03:23:23 +0000 (0:00:00.010) 0:01:19.251 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml:2 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.021) 
0:01:19.273 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.025) 0:01:19.299 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.312 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.326 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": 
"Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.339 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.353 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.366 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.014) 0:01:19.381 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.394 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.407 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.421 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.012) 0:01:19.434 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.447 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", 
"skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.014) 0:01:19.461 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.013) 0:01:19.474 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:89 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.012) 0:01:19.487 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml:2 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.022) 0:01:19.510 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 
'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.024) 0:01:19.534 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:19.544 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.009) 0:01:19.554 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.009) 0:01:19.564 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:19.574 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:19.585 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.009) 0:01:19.595 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.009) 0:01:19.605 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:92 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:19.616 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:5 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.024) 0:01:19.640 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:10 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.020) 0:01:19.661 ********* skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:17 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.673 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml for sut => (item=/dev/sda) TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:2 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.018) 0:01:19.692 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:6 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.022) 0:01:19.714 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:14 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.022) 0:01:19.737 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:23 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.018) 0:01:19.755 ********* skipping: [sut] => { "changed": false, "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:32 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.018) 0:01:19.774 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:41 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.018) 0:01:19.792 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:24 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.803 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:95 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:19.814 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml:2 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.025) 0:01:19.839 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.024) 0:01:19.864 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or 
storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:19.875 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.886 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.037) 0:01:19.924 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.936 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.947 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.958 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.970 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:19.981 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.012) 0:01:19.993 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.004 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.015 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.026 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.036 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:98 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.047 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml for sut TASK [Run 'stratis report'] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:6 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.027) 0:01:20.075 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Get information about Stratis] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:11 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.085 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:15 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.009) 0:01:20.095 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:25 
Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.105 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:34 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.011) 0:01:20.116 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:44 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.126 ********* ok: [sut] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:101 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.137 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml:3 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.010) 0:01:20.148 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-1', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-1', 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 
'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.023) 0:01:20.172 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.022) 0:01:20.194 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.047) 0:01:20.241 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:23:24 +0000 (0:00:00.016) 0:01:20.258 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.022) 0:01:20.281 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:20.292 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.013) 0:01:20.306 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.011) 0:01:20.317 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:20.328 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:20.339 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.009) 0:01:20.349 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.009) 0:01:20.359 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.009) 0:01:20.369 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.012) 0:01:20.381 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test1 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device 
identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.033) 0:01:20.414 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.021) 0:01:20.436 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.021) 0:01:20.458 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.018) 0:01:20.477 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.011) 0:01:20.488 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.026) 0:01:20.514 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.023) 0:01:20.538 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458560.1992779, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1719458560.1992779, "dev": 5, "device_type": 64769, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 57345, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458560.1992779, "nlink": 1, "path": "/dev/mapper/foo-test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.130) 0:01:20.668 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.014) 0:01:20.683 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:20.693 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.013) 0:01:20.707 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.013) 0:01:20.720 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:20.731 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.012) 0:01:20.744 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.009) 0:01:20.754 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.295) 0:01:21.050 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.011) 0:01:21.061 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.011) 0:01:21.072 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] 
**************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.022) 0:01:21.095 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.011) 0:01:21.106 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:21.116 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:21.127 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.010) 0:01:21.138 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.012) 0:01:21.150 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.023) 0:01:21.174 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.021) 0:01:21.195 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.018) 0:01:21.214 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.018) 0:01:21.232 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:23:25 +0000 (0:00:00.018) 0:01:21.250 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.039) 0:01:21.290 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.011) 0:01:21.302 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.313 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.324 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.334 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.344 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.011) 0:01:21.356 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:23:26 
+0000 (0:00:00.010) 0:01:21.366 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.376 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.386 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.396 ********* ok: [sut] => { "bytes": 6442450944, "changed": false, "lvm": "6g", "parted": "6GiB", "size": "6 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.128) 0:01:21.525 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.022) 0:01:21.547 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.020) 0:01:21.568 ********* ok: [sut] => { "storage_test_expected_size": "4294967296.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.012) 0:01:21.580 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.131) 0:01:21.711 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": 
"/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.025) 0:01:21.737 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "2.5G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", 
"name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.024) 0:01:21.761 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.024) 0:01:21.786 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "6442450944.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.024) 0:01:21.811 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.011) 0:01:21.822 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.833 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.843 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.009) 0:01:21.853 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.011) 0:01:21.864 ********* skipping: [sut] => { 
"changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.874 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.884 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.894 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.009) 0:01:21.904 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.009) 0:01:21.914 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.011) 0:01:21.925 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.935 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.945 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.956 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.010) 0:01:21.966 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 6442450944, "changed": false, 
"failed": false, "lvm": "6g", "parted": "6GiB", "size": "6 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.012) 0:01:21.978 ********* ok: [sut] => { "storage_test_expected_size": "6442450944.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.013) 0:01:21.991 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.024) 0:01:22.016 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test1" ], "delta": "0:00:00.017858", "end": "2024-06-27 03:23:26.879524", "rc": 0, "start": "2024-06-27 03:23:26.861666" } STDOUT: LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.144) 0:01:22.160 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.021) 0:01:22.181 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.021) 0:01:22.203 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.018) 0:01:22.222 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.019) 0:01:22.242 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:23:26 +0000 (0:00:00.018) 0:01:22.261 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.018) 0:01:22.279 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.011) 0:01:22.291 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.021) 0:01:22.312 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.048) 0:01:22.361 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.017) 0:01:22.378 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test2", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.024) 0:01:22.403 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.010) 0:01:22.414 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.013) 0:01:22.428 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and 
storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.010) 0:01:22.439 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.009) 0:01:22.449 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.009) 0:01:22.459 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.010) 0:01:22.470 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.010) 0:01:22.480 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.010) 0:01:22.490 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.010) 0:01:22.501 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test2 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.033) 0:01:22.534 ********* ok: [sut] => { "changed": false } 
MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.021) 0:01:22.555 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.052) 0:01:22.608 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.020) 0:01:22.629 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.012) 0:01:22.641 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.023) 0:01:22.665 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.022) 0:01:22.688 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458600.9056618, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1719458600.9056618, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 56835, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458600.9056618, "nlink": 1, "path": "/dev/mapper/foo-test2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.132) 0:01:22.820 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.016) 0:01:22.836 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or 
storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.011) 0:01:22.847 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.014) 0:01:22.862 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.012) 0:01:22.875 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.011) 0:01:22.886 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.013) 0:01:22.899 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.011) 0:01:22.911 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.290) 0:01:23.201 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.011) 0:01:23.213 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.010) 0:01:23.223 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.022) 0:01:23.246 ********* skipping: [sut] => { "changed": false, "false_condition": 
"_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:23:27 +0000 (0:00:00.011) 0:01:23.257 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.269 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.280 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.291 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.302 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.023) 0:01:23.326 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.021) 0:01:23.348 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.020) 0:01:23.368 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.019) 0:01:23.387 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", 
"skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.018) 0:01:23.406 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.417 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.428 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.009) 0:01:23.438 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.449 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.459 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.470 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.480 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.490 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] 
********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.501 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.512 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.522 ********* ok: [sut] => { "bytes": 2684354560, "changed": false, "lvm": "2g", "parted": "2GiB", "size": "2 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.127) 0:01:23.650 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.020) 0:01:23.671 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.019) 0:01:23.690 ********* ok: [sut] => { "storage_test_expected_size": "6442450944.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.012) 0:01:23.703 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.132) 0:01:23.835 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ 
"sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.025) 0:01:23.861 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/mapper/foo-test1", "size": "6G", "type": "lvm", "uuid": "31a36f06-0c78-4dfd-884e-d65ca5af8039" }, "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "2.5G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", 
"mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.025) 0:01:23.886 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.023) 0:01:23.909 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "2684354560.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.024) 0:01:23.933 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.945 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.012) 0:01:23.957 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.968 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:23.980 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:23.990 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:24.000 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.009) 0:01:24.010 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:24.021 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:24.031 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:24.041 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:24.051 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.009) 0:01:24.061 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.009) 0:01:24.071 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.011) 0:01:24.082 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.010) 0:01:24.092 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 2684354560, "changed": false, "failed": false, "lvm": "2g", "parted": "2GiB", "size": "2 GiB" } } TASK [Show expected size] ****************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.013) 0:01:24.106 ********* ok: [sut] => { "storage_test_expected_size": "2684354560.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.012) 0:01:24.118 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:23:28 +0000 (0:00:00.023) 0:01:24.141 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test2" ], "delta": "0:00:00.019800", "end": "2024-06-27 03:23:29.005493", "rc": 0, "start": "2024-06-27 03:23:28.985693" } STDOUT: LVM2_LV_NAME=test2 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.145) 0:01:24.286 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.023) 0:01:24.310 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.086) 0:01:24.396 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.020) 0:01:24.417 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.018) 0:01:24.435 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.018) 0:01:24.454 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.018) 0:01:24.473 ********* ok: [sut] => { "ansible_facts": { 
"_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:44 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.011) 0:01:24.484 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:54 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.010) 0:01:24.494 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get the size of test2 volume] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:101 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.010) 0:01:24.505 ********* ok: [sut] => { "changed": false, "cmd": [ "lsblk", "--noheadings", "-o", "SIZE", "/dev/mapper/foo-test2" ], "delta": "0:00:00.004361", "end": "2024-06-27 03:23:29.350861", "rc": 0, "start": "2024-06-27 03:23:29.346500" } STDOUT: 2.5G TASK [Remove the test1 volume without changing its size] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:106 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.124) 0:01:24.630 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.036) 0:01:24.667 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.016) 0:01:24.683 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.021) 0:01:24.705 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", 
"changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.028) 0:01:24.733 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.014) 0:01:24.748 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.014) 0:01:24.763 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.012) 0:01:24.775 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.012) 0:01:24.787 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:23:29 +0000 (0:00:00.026) 0:01:24.814 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 
TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:23:30 +0000 (0:00:00.483) 0:01:25.298 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "state": "present", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "60%", "state": "absent" }, { "mount_point": "/opt/test2", "name": "test2", "size": "25%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:23:30 +0000 (0:00:00.015) 0:01:25.314 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:23:30 +0000 (0:00:00.013) 0:01:25.327 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:23:34 +0000 (0:00:04.261) 0:01:29.589 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:23:34 +0000 (0:00:00.022) 0:01:29.612 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:23:34 +0000 (0:00:00.022) 0:01:29.634 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:23:34 +0000 (0:00:00.019) 0:01:29.654 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:23:34 +0000 (0:00:00.018) 0:01:29.672 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already 
installed" ] } lsrpackages: kpartx lvm2 TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:23:34 +0000 (0:00:00.298) 0:01:29.971 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" 
}, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": 
"nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { 
"name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": 
"systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:23:35 +0000 (0:00:00.764) 0:01:30.735 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:23:35 +0000 (0:00:00.031) 0:01:30.766 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:23:35 +0000 (0:00:00.009) 0:01:30.776 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test2", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": 
null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Thursday 27 June 2024 03:23:40 +0000 (0:00:04.892) 0:01:35.668 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:23:40 +0000 (0:00:00.020) 0:01:35.689 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 27 June 2024 03:23:40 
+0000 (0:00:00.010) 0:01:35.699 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test1", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/foo-test1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test2", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task 
path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Thursday 27 June 2024 03:23:40 +0000 (0:00:00.016) 0:01:35.716 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Thursday 27 June 2024 03:23:40 +0000 (0:00:00.015) 0:01:35.732 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Thursday 27 June 
2024 03:23:40 +0000 (0:00:00.013) 0:01:35.746 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [sut] => (item={'src': '/dev/mapper/foo-test1', 'state': 'absent', 'fstype': 'xfs', 'path': '/opt/test1'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "/dev/mapper/foo-test1", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test1" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Thursday 27 June 2024 03:23:40 +0000 (0:00:00.149) 0:01:35.895 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Thursday 27 June 2024 03:23:40 +0000 (0:00:00.260) 0:01:36.156 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test2" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Thursday 27 June 2024 03:23:41 +0000 (0:00:00.157) 0:01:36.313 ********* skipping: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Thursday 27 June 2024 03:23:41 +0000 (0:00:00.026) 0:01:36.340 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Thursday 27 June 2024 03:23:41 +0000 (0:00:00.246) 0:01:36.587 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719457592.7553205, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Thursday 27 June 2024 03:23:41 +0000 (0:00:00.131) 0:01:36.718 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Thursday 27 June 2024 03:23:41 +0000 (0:00:00.010) 0:01:36.729 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:123 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.622) 0:01:37.351 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:2 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.026) 0:01:37.377 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", 
"mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:7 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.024) 0:01:37.401 ********* skipping: [sut] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:15 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.020) 0:01:37.422 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "2.5G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:20 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.128) 0:01:37.550 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003047", "end": "2024-06-27 03:23:42.394029", "rc": 0, "start": "2024-06-27 03:23:42.390982" } STDOUT: # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test2 /opt/test2 ext4 defaults 0 0 TASK [Read the 
/etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:25 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.123) 0:01:37.674 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003041", "end": "2024-06-27 03:23:42.519002", "failed_when_result": false, "rc": 0, "start": "2024-06-27 03:23:42.515961" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:34 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.123) 0:01:37.797 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml for sut => (item={'grow_to_fill': False, 'name': 'foo', 'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_key_size': None, 'disks': ['sda'], 'encryption_key': None, 'encryption_luks_version': None, 'raid_device_count': None, 'raid_spare_count': None, 'state': 'present', 'volumes': [{'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'absent', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}, {'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}], 'encryption_tang_url': None, 'shared': False, 'raid_level': None, 'encryption_clevis_pin': None, 'type': 'lvm', 'encryption_cipher': None, 'encryption_tang_thumbprint': None, 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:5 Thursday 27 June 2024 03:23:42 +0000 
(0:00:00.034) 0:01:37.832 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:18 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.054) 0:01:37.887 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.019400", "end": "2024-06-27 03:23:42.752340", "rc": 0, "start": "2024-06-27 03:23:42.732940" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:24 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.145) 0:01:38.032 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:34 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.016) 0:01:38.049 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml for sut => (item=members) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml for sut => (item=volumes) TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:2 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.023) 0:01:38.073 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:8 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.030) 0:01:38.103 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:17 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.124) 0:01:38.228 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:22 Thursday 27 June 2024 03:23:42 +0000 (0:00:00.023) 0:01:38.251 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:27 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.024) 0:01:38.275 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:36 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.023) 0:01:38.298 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:41 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.312 ********* ok: [sut] => { 
"ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:46 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.023) 0:01:38.335 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:51 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.011) 0:01:38.346 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:64 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.017) 0:01:38.364 ********* ok: [sut] => { "changed": false, "rc": 0 } STDOUT: False STDERR: Shared connection to 10.31.8.226 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:73 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.162) 0:01:38.527 ********* skipping: [sut] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "false_condition": "grow_supported.stdout | trim == 'True'", "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:83 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.023) 0:01:38.550 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:8 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.022) 0:01:38.572 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:14 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.011) 0:01:38.583 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:19 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.011) 0:01:38.594 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:24 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.605 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] 
************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:29 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.616 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:37 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.626 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:46 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.012) 0:01:38.639 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:55 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.649 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:64 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.660 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:74 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.671 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:83 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.681 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:86 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:38.692 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml:2 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.023) 0:01:38.716 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 
'encryption_password': None, 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'absent', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.025) 0:01:38.741 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.014) 0:01:38.755 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.014) 0:01:38.769 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.783 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the 
requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.796 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.809 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.823 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.836 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.014) 0:01:38.851 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.864 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.877 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.891 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.905 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK 
[Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:38.918 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:89 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.014) 0:01:38.933 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml:2 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.023) 0:01:38.956 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'absent', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.023) 0:01:38.980 ********* skipping: [sut] => { "changed": false, "false_condition": 
"storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.011) 0:01:38.991 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:39.002 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:39.012 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:39.023 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.009) 0:01:39.032 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.009) 0:01:39.042 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.010) 0:01:39.053 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:92 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.011) 0:01:39.064 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:5 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.024) 0:01:39.089 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:10 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.021) 0:01:39.110 ********* skipping: 
[sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:17 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.013) 0:01:39.124 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml for sut => (item=/dev/sda) TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:2 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.019) 0:01:39.143 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:6 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.023) 0:01:39.166 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:14 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.022) 0:01:39.188 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:23 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.018) 0:01:39.207 ********* skipping: [sut] => { "changed": false, "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:32 Thursday 27 June 2024 03:23:43 +0000 (0:00:00.019) 0:01:39.226 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:41 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.088) 0:01:39.315 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:24 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.012) 0:01:39.328 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:95 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.339 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml for sut TASK 
[Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml:2 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.025) 0:01:39.364 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'absent', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.026) 0:01:39.390 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.401 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 
Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.412 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.423 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.434 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.446 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.456 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.467 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.478 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.489 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.499 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or 
storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.511 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.522 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.533 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:98 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.544 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml for sut TASK [Run 'stratis report'] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:6 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.026) 0:01:39.571 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Get information about Stratis] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:11 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.582 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:15 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.593 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:25 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.603 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:34 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.613 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task 
path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:44 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.624 ********* ok: [sut] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:101 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.012) 0:01:39.636 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml:3 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.648 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test1', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'xfs', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test1', 'size': '60%', 'mount_point': '/opt/test1', 'compression': None, 'encryption_password': None, 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'absent', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test1', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], 'cache_mode': None, 'cache_devices': [], 'name': 'test1', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '25%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.022) 0:01:39.671 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", 
"fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.022) 0:01:39.693 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.048) 0:01:39.741 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.018) 0:01:39.760 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.023) 0:01:39.783 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.794 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.804 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.815 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and 
storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.009) 0:01:39.825 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.836 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.847 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.857 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.867 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:39.878 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.033) 0:01:39.912 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.018) 0:01:39.931 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.023) 0:01:39.954 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.018) 0:01:39.973 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.984 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:39.995 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.018) 0:01:40.014 ********* ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.125) 0:01:40.139 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.011) 0:01:40.150 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.013) 0:01:40.164 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.009) 0:01:40.174 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.012) 0:01:40.186 ********* 
skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:40.197 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.010) 0:01:40.208 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:23:44 +0000 (0:00:00.042) 0:01:40.251 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.300) 0:01:40.551 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.563 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.574 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:40.584 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.595 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.012) 0:01:40.608 ********* skipping: [sut] => { 
"changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:40.619 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.630 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.642 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.026) 0:01:40.668 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.022) 0:01:40.691 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.022) 0:01:40.714 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.018) 0:01:40.733 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.019) 0:01:40.752 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.012) 0:01:40.764 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 
'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.776 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.787 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.012) 0:01:40.800 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.811 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.823 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.834 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.845 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.856 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.012) 0:01:40.869 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:40.880 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.018) 0:01:40.899 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.018) 0:01:40.918 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.017) 0:01:40.936 ********* ok: [sut] => { "storage_test_expected_size": "2684354560.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.012) 0:01:40.948 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.021) 0:01:40.970 ********* skipping: [sut] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.018) 0:01:40.988 ********* skipping: [sut] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.017) 0:01:41.006 ********* skipping: [sut] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.017) 0:01:41.024 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.017) 0:01:41.041 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.052 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.012) 0:01:41.065 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:41.076 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.087 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.097 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.107 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.009) 0:01:41.117 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:41.128 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.139 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.150 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 
Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.160 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.009) 0:01:41.170 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.009) 0:01:41.180 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.011) 0:01:41.191 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.201 ********* ok: [sut] => { "storage_test_actual_size": { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.013) 0:01:41.214 ********* ok: [sut] => { "storage_test_expected_size": "2684354560.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.012) 0:01:41.227 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.017) 0:01:41.245 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:23:45 +0000 (0:00:00.010) 0:01:41.255 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.012) 0:01:41.267 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' 
and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.278 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.011) 0:01:41.289 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.300 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.311 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.322 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.012) 0:01:41.334 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.022) 0:01:41.356 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] 
************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.049) 0:01:41.406 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.017) 0:01:41.423 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test2", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.022) 0:01:41.446 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.457 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.013) 0:01:41.471 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.011) 0:01:41.483 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.494 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.504 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.514 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": 
"Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.525 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.010) 0:01:41.536 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.012) 0:01:41.548 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test2 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.034) 0:01:41.583 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.022) 0:01:41.605 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.021) 0:01:41.627 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.018) 0:01:41.646 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.011) 0:01:41.657 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.027) 0:01:41.684 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.023) 0:01:41.707 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458600.9056618, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1719458600.9056618, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 56835, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458600.9056618, "nlink": 1, "path": "/dev/mapper/foo-test2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.131) 0:01:41.839 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.014) 0:01:41.854 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.011) 0:01:41.865 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.013) 0:01:41.878 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.049) 0:01:41.927 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.013) 0:01:41.940 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.014) 0:01:41.955 ********* skipping: [sut] => { "changed": false, "false_condition": 
"storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.011) 0:01:41.966 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:23:46 +0000 (0:00:00.292) 0:01:42.259 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:42.271 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.012) 0:01:42.283 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.023) 0:01:42.307 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:42.318 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:42.329 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:42.340 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.012) 0:01:42.352 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test 
variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.013) 0:01:42.366 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.025) 0:01:42.392 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.022) 0:01:42.415 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.019) 0:01:42.434 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.018) 0:01:42.453 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.019) 0:01:42.472 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.013) 0:01:42.485 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:42.496 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:42.507 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** 
task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:42.518 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:42.529 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:42.539 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:42.551 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:42.561 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:42.572 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:42.583 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:42.595 ********* ok: [sut] => { "bytes": 2684354560, "changed": false, "lvm": "2g", "parted": "2GiB", "size": "2 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.127) 0:01:42.722 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.022) 0:01:42.745 ********* skipping: [sut] => { "changed": false, 
"false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.020) 0:01:42.766 ********* ok: [sut] => { "storage_test_expected_size": "2684354560.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.013) 0:01:42.779 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.128) 0:01:42.907 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test1", "_mount_id": "/dev/mapper/foo-test1", "_raw_device": "/dev/mapper/foo-test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "60%", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "25%", "state": "present", 
"thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.025) 0:01:42.933 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "2.5G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.024) 0:01:42.958 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.024) 0:01:42.983 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "2684354560.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.024) 0:01:43.007 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.019 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } 
TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.030 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.041 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:43.051 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.063 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.074 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:43.084 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:43.094 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:43.104 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.009) 0:01:43.114 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.126 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] 
***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.137 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.011) 0:01:43.148 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:43.159 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.010) 0:01:43.169 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 2684354560, "changed": false, "failed": false, "lvm": "2g", "parted": "2GiB", "size": "2 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.012) 0:01:43.182 ********* ok: [sut] => { "storage_test_expected_size": "2684354560.0" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.013) 0:01:43.196 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:23:47 +0000 (0:00:00.024) 0:01:43.221 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test2" ], "delta": "0:00:00.018420", "end": "2024-06-27 03:23:48.081535", "rc": 0, "start": "2024-06-27 03:23:48.063115" } STDOUT: LVM2_LV_NAME=test2 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.142) 0:01:43.363 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.022) 0:01:43.385 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.022) 0:01:43.407 ********* skipping: [sut] => { 
"changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.019) 0:01:43.426 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.020) 0:01:43.446 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.018) 0:01:43.465 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.019) 0:01:43.484 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:44 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.012) 0:01:43.496 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:54 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.009) 0:01:43.506 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get the size of test2 volume again] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:126 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.010) 0:01:43.516 ********* ok: [sut] => { "changed": false, "cmd": [ "lsblk", "--noheadings", "-o", "SIZE", "/dev/mapper/foo-test2" ], "delta": "0:00:00.004361", "end": "2024-06-27 03:23:48.363149", "rc": 0, "start": "2024-06-27 03:23:48.358788" } STDOUT: 2.5G TASK [Verify that removing test1 didn't cause a change in test2 size] ********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:131 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.127) 0:01:43.644 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Grow test2 using a percentage-based size spec] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:135 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.048) 0:01:43.692 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.040) 0:01:43.732 ********* included: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.018) 0:01:43.751 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.022) 0:01:43.773 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.028) 0:01:43.802 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.014) 0:01:43.816 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.013) 0:01:43.830 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 
27 June 2024 03:23:48 +0000 (0:00:00.014) 0:01:43.844 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.012) 0:01:43.856 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:23:48 +0000 (0:00:00.027) 0:01:43.883 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:23:49 +0000 (0:00:00.492) 0:01:44.375 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "state": "present", "volumes": [ { "mount_point": "/opt/test2", "name": "test2", "size": "50%" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:23:49 +0000 (0:00:00.017) 0:01:44.393 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 
'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:23:49 +0000 (0:00:00.014) 0:01:44.407 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:23:53 +0000 (0:00:04.098) 0:01:48.506 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:23:53 +0000 (0:00:00.022) 0:01:48.529 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:23:53 +0000 (0:00:00.019) 0:01:48.548 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:23:53 +0000 (0:00:00.020) 0:01:48.568 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:23:53 +0000 (0:00:00.019) 0:01:48.587 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "7:lvm2-2.02.187-6.el7_9.5.x86_64 providing lvm2 is already installed", "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: kpartx lvm2 TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:23:53 +0000 (0:00:00.296) 0:01:48.884 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": 
{ "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", 
"state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { 
"name": "nfs-secure.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": 
"rhel-import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": { "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:23:54 +0000 (0:00:00.762) 0:01:49.647 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:23:54 +0000 (0:00:00.034) 0:01:49.681 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:23:54 +0000 (0:00:00.010) 0:01:49.691 ********* changed: [sut] => { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test2", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test2", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", 
"mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Thursday 27 June 2024 03:23:59 +0000 (0:00:04.865) 0:01:54.557 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.020) 0:01:54.577 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.010) 0:01:54.587 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "resize device", "device": "/dev/mapper/foo-test2", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test2", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" } ], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, 
"raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.016) 0:01:54.603 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.016) 0:01:54.619 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.013) 0:01:54.633 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.021) 0:01:54.655 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.259) 0:01:54.914 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test2" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.159) 0:01:55.074 ********* skipping: [sut] => (item={'src': '/dev/mapper/foo-test2', 'group': None, 'dump': 0, 'passno': 0, 'fstype': 'ext4', 'state': 'mounted', 'mode': None, 'owner': None, 'path': '/opt/test2', 'opts': 'defaults'}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "ext4", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Thursday 27 June 2024 03:23:59 +0000 (0:00:00.026) 0:01:55.100 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Thursday 27 June 2024 03:24:00 +0000 (0:00:00.249) 0:01:55.349 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719457592.7553205, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage 
/etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Thursday 27 June 2024 03:24:00 +0000 (0:00:00.132) 0:01:55.481 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Thursday 27 June 2024 03:24:00 +0000 (0:00:00.010) 0:01:55.492 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:148 Thursday 27 June 2024 03:24:00 +0000 (0:00:00.633) 0:01:56.126 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:2 Thursday 27 June 2024 03:24:00 +0000 (0:00:00.029) 0:01:56.156 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:7 Thursday 27 June 2024 03:24:00 +0000 (0:00:00.023) 0:01:56.179 ********* skipping: [sut] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:15 Thursday 27 June 2024 03:24:00 +0000 (0:00:00.019) 0:01:56.198 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "5G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:20 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.128) 0:01:56.326 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003238", "end": "2024-06-27 03:24:01.175226", "rc": 0, "start": "2024-06-27 03:24:01.171988" } STDOUT: # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 /dev/mapper/foo-test2 /opt/test2 ext4 defaults 0 0 TASK [Read the 
/etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:25 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.128) 0:01:56.455 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003063", "end": "2024-06-27 03:24:01.316377", "failed_when_result": false, "rc": 0, "start": "2024-06-27 03:24:01.313314" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:34 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.141) 0:01:56.596 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml for sut => (item={'grow_to_fill': False, 'name': 'foo', 'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_key_size': None, 'disks': ['sda'], 'encryption_key': None, 'encryption_luks_version': None, 'raid_device_count': None, 'raid_spare_count': None, 'state': 'present', 'volumes': [{'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '50%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}], 'encryption_tang_url': None, 'shared': False, 'raid_level': None, 'encryption_clevis_pin': None, 'type': 'lvm', 'encryption_cipher': None, 'encryption_tang_thumbprint': None, 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:5 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.037) 0:01:56.634 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:18 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.012) 0:01:56.646 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheadings", "--binary", "-o", "shared", "foo" ], "delta": "0:00:00.019413", "end": "2024-06-27 03:24:01.515610", "rc": 0, "start": "2024-06-27 03:24:01.496197" } STDOUT: 0 TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:24 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.149) 0:01:56.796 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify pool subset] ****************************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:34 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.018) 0:01:56.815 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml for sut => (item=members) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml for sut => (item=volumes) TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:2 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.024) 0:01:56.840 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:8 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.031) 0:01:56.871 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda", "pv": "/dev/sda" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:17 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.131) 0:01:57.003 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:22 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.064) 0:01:57.067 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:27 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.025) 0:01:57.093 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:36 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.024) 0:01:57.117 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:41 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.014) 0:01:57.132 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:46 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.023) 0:01:57.155 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:51 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.011) 0:01:57.166 ********* ok: [sut] => (item=/dev/sda) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda" } MSG: All assertions passed TASK [Check that blivet supports PV grow to fill] ****************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:64 Thursday 27 June 2024 03:24:01 +0000 (0:00:00.018) 0:01:57.185 ********* ok: [sut] => { "changed": false, "rc": 0 } STDOUT: False STDERR: Shared connection to 10.31.8.226 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:73 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.710) 0:01:57.896 ********* skipping: [sut] => (item=/dev/sda) => { "ansible_loop_var": "st_pool_pv", "changed": false, "false_condition": "grow_supported.stdout | trim == 'True'", "skip_reason": "Conditional result was False", "st_pool_pv": "/dev/sda" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:83 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.024) 0:01:57.920 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:8 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.022) 0:01:57.943 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:14 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:57.954 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:19 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.012) 0:01:57.967 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:24 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:57.978 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:29 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:57.989 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:37 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:58.001 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:46 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:58.012 ********* skipping: [sut] => { "changed": 
false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:55 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:58.023 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:64 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.012) 0:01:58.036 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:74 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:58.048 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:83 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.011) 0:01:58.059 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:86 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.012) 0:01:58.071 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml:2 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.024) 0:01:58.096 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '50%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about the LV] ******************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:8 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.022) 0:01:58.119 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:16 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.015) 0:01:58.134 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:20 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.014) 0:01:58.149 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:27 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.014) 0:01:58.163 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:31 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.016) 0:01:58.180 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:37 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.014) 0:01:58.194 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-lvmraid.yml:42 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.014) 0:01:58.209 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_lvmraid_volume.raid_level is not none", "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:89 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.015) 0:01:58.224 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml:2 Thursday 27 June 2024 03:24:02 +0000 (0:00:00.023) 0:01:58.248 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '50%', 'mount_point': 
'/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:8 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.022) 0:01:58.270 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:16 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.282 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:22 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.011) 0:01:58.294 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_thin_volume.thin", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-thin.yml:26 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.011) 0:01:58.305 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:92 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.318 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:5 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.026) 0:01:58.344 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:10 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.022) 0:01:58.367 ********* skipping: [sut] => (item=/dev/sda) => { "_storage_test_pool_member_path": "/dev/sda", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption", "skip_reason": "Conditional result was False" } 
skipping: [sut] => { "changed": false } MSG: All items skipped TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:17 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.013) 0:01:58.380 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml for sut => (item=/dev/sda) TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:2 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.020) 0:01:58.401 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:6 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.023) 0:01:58.425 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:14 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.023) 0:01:58.449 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:23 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.019) 0:01:58.468 ********* skipping: [sut] => { "changed": false, "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:32 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.018) 0:01:58.487 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-crypttab.yml:41 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.018) 0:01:58.506 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:24 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.013) 0:01:58.519 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:95 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.531 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml:2 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.025) 0:01:58.557 ********* included: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '50%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:8 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.021) 0:01:58.579 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:15 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.011) 0:01:58.590 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:21 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.010) 0:01:58.601 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:27 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.010) 0:01:58.612 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:34 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.011) 0:01:58.623 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:40 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.010) 
0:01:58.633 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-member-vdo.yml:46 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.646 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Check Stratis] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:98 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.658 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml for sut TASK [Run 'stratis report'] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:6 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.026) 0:01:58.685 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Get information about Stratis] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:11 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.697 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:15 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.014) 0:01:58.712 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:25 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.066) 0:01:58.778 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:34 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.015) 0:01:58.793 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:44 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.014) 0:01:58.807 ********* ok: [sut] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:101 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.820 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] 
****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml:3 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.011) 0:01:58.831 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml for sut => (item={'_raw_device': '/dev/mapper/foo-test2', 'raid_metadata_version': None, 'mount_device_identifier': 'uuid', 'fs_type': 'ext4', 'mount_options': 'defaults', '_device': '/dev/mapper/foo-test2', 'size': '50%', 'mount_point': '/opt/test2', 'compression': None, 'encryption_password': None, '_kernel_device': '/dev/dm-0', 'encryption': False, 'raid_level': None, 'raid_device_count': None, 'state': 'present', 'vdo_pool_size': None, 'mount_mode': None, 'thin_pool_name': None, 'thin_pool_size': None, 'encryption_cipher': None, 'deduplication': None, 'encryption_key_size': None, 'encryption_key': None, 'fs_label': '', 'encryption_luks_version': None, 'raid_stripe_size': None, 'mount_passno': 0, '_mount_id': '/dev/mapper/foo-test2', 'mount_user': None, 'raid_spare_count': None, 'raid_disks': [], '_raw_kernel_device': '/dev/dm-0', 'cache_mode': None, 'cache_devices': [], 'name': 'test2', 'mount_group': None, 'type': 'lvm', 'disks': ['sda'], 'cached': False, 'thin': False, 'mount_check': 0, 'cache_size': 0, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'fs_create_options': ''}) TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:2 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.020) 0:01:58.852 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:19 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.022) 0:01:58.874 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml for sut => (item=mount) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml for sut => (item=fstab) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml for sut => (item=fs) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml for sut => (item=device) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml for sut => (item=encryption) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml for sut => (item=md) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml for sut => (item=size) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml for sut => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:7 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.049) 0:01:58.924 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:11 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.018) 0:01:58.942 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test2", "storage_test_swap_expected_matches": "0" }, "changed": 
false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:19 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.028) 0:01:58.971 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:28 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:58.983 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:36 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.014) 0:01:58.997 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:42 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:59.009 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:48 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.010) 0:01:59.020 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:57 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.010) 0:01:59.031 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:63 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.011) 0:01:59.042 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:69 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.011) 0:01:59.053 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-mount.yml:79 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.010) 0:01:59.064 ********* ok: [sut] => { 
"ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:2 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:59.077 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "/dev/mapper/foo-test2 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 ext4 defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:17 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.034) 0:01:59.112 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:24 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.023) 0:01:59.135 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:33 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.022) 0:01:59.158 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fstab.yml:45 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.019) 0:01:59.178 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:6 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.012) 0:01:59.190 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-fs.yml:14 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.027) 0:01:59.218 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:3 Thursday 27 June 2024 03:24:03 +0000 (0:00:00.023) 0:01:59.241 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719458639.2410233, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1719458639.2410233, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", 
"inode": 56835, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1719458639.2410233, "nlink": 1, "path": "/dev/mapper/foo-test2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:9 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.133) 0:01:59.375 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:16 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.015) 0:01:59.390 ********* skipping: [sut] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:23 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:01:59.402 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:29 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.014) 0:01:59.416 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:33 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.013) 0:01:59.430 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-device.yml:38 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:01:59.441 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:3 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.014) 0:01:59.456 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:01:59.467 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "cryptsetup-2.0.3-6.el7.x86_64 providing cryptsetup is already installed" ] } lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:16 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.299) 0:01:59.766 ********* skipping: [sut] => { "changed": 
false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:22 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:01:59.778 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:29 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.012) 0:01:59.790 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:40 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.024) 0:01:59.814 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:46 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:01:59.826 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:51 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:01:59.837 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:63 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:01:59.848 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:75 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:01:59.859 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:87 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.012) 0:01:59.871 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:93 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.026) 0:01:59.897 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab 
entry] ******************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:100 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.029) 0:01:59.926 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:108 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.022) 0:01:59.949 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:116 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.019) 0:01:59.969 ********* skipping: [sut] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:124 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.018) 0:01:59.988 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:8 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.013) 0:02:00.001 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:14 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:02:00.012 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:19 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:02:00.023 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:24 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:02:00.034 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:29 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:02:00.045 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task 
path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:37 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:02:00.056 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:46 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:02:00.067 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:54 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:02:00.079 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:62 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.010) 0:02:00.090 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-md.yml:70 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:02:00.101 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:3 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.011) 0:02:00.113 ********* ok: [sut] => { "bytes": 5368709120, "changed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:11 Thursday 27 June 2024 03:24:04 +0000 (0:00:00.129) 0:02:00.242 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:20 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.026) 0:02:00.268 ********* skipping: [sut] => { "changed": false, "false_condition": "'%' not in storage_test_volume.size | string", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:28 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.022) 0:02:00.290 ********* ok: [sut] => { "storage_test_expected_size": "2684354560.0" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:32 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.015) 0:02:00.306 ********* ok: [sut] => { "bytes": 10737418240, "changed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } TASK [Show test pool] 
********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:46 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.129) 0:02:00.436 ********* ok: [sut] => { "storage_test_pool": { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test2", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test2", "_raw_device": "/dev/mapper/foo-test2", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "ext4", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "50%", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } } TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:50 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.026) 0:02:00.462 ********* ok: [sut] => { "storage_test_blkinfo": { "changed": false, "failed": false, "info": { "/dev/mapper/foo-test2": { "fstype": "ext4", "label": "", "mountpoint": "/opt/test2", "name": "/dev/mapper/foo-test2", "size": "5G", "type": "lvm", "uuid": "bac098f2-ba6b-4948-9b1c-cb29b4c4b799" }, "/dev/sda": { "fstype": "LVM2_member", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "3cn5A0-8nED-OjJz-JJih-4eS6-6E62-2gQgOz" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { 
"fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } } TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:54 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.027) 0:02:00.490 ********* ok: [sut] => { "storage_test_pool_size": { "bytes": 10737418240, "changed": false, "failed": false, "lvm": "10g", "parted": "10GiB", "size": "10 GiB" } } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:58 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.029) 0:02:00.519 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "5368709120.0" }, "changed": false } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:67 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.025) 0:02:00.545 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:71 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.014) 0:02:00.559 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:76 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.013) 0:02:00.572 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:82 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.011) 0:02:00.584 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:86 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.010) 0:02:00.595 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:91 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.014) 0:02:00.609 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:96 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.012) 0:02:00.622 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": 
"Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:101 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.013) 0:02:00.636 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:105 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.013) 0:02:00.650 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:109 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.011) 0:02:00.661 ********* skipping: [sut] => { "false_condition": "storage_test_volume.thin" } TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:113 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.010) 0:02:00.672 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:120 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.012) 0:02:00.685 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:127 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.011) 0:02:00.696 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:131 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.011) 0:02:00.707 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:137 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.011) 0:02:00.718 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.thin", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:143 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.010) 0:02:00.729 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 5368709120, "changed": false, "failed": false, "lvm": "5g", "parted": "5GiB", "size": "5 GiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:147 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.013) 0:02:00.742 ********* ok: [sut] => { "storage_test_expected_size": "5368709120.0" } TASK [Assert expected size is actual size] ************************************* task 
path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-size.yml:151 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.014) 0:02:00.757 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:5 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.127) 0:02:00.884 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test2" ], "delta": "0:00:00.019584", "end": "2024-06-27 03:24:05.747522", "rc": 0, "start": "2024-06-27 03:24:05.727938" } STDOUT: LVM2_LV_NAME=test2 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:13 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.143) 0:02:01.028 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:17 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.024) 0:02:01.052 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:24 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.023) 0:02:01.076 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:31 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.020) 0:02:01.096 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:37 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.020) 0:02:01.117 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-cache.yml:42 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.025) 0:02:01.142 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_volume.cached | bool", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume.yml:25 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.020) 0:02:01.163 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:44 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.012) 0:02:01.176 ********* 
skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:54 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.009) 0:02:01.185 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Remove both of the LVM logical volumes in 'foo' created above] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:151 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.010) 0:02:01.196 ********* TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.043) 0:02:01.239 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Thursday 27 June 2024 03:24:05 +0000 (0:00:00.017) 0:02:01.257 ********* skipping: [sut] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.023) 0:02:01.281 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=CentOS_7.yml) => { "ansible_facts": { "__storage_blivet_diskvolume_mkfs_option_map": { "ext2": "-F", "ext3": "-F", "ext4": "-F" }, "blivet_package_list": [ "python-enum34", "python-blivet3", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_7.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_7.yml" } skipping: [sut] => (item=CentOS_7.9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_7.9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.030) 0:02:01.311 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is 
ostree] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.014) 0:02:01.326 ********* skipping: [sut] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.013) 0:02:01.340 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.012) 0:02:01.352 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.012) 0:02:01.364 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for sut TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.028) 0:02:01.393 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "python-enum34-1.0.4-1.el7.noarch providing python-enum34 is already installed", "1:python2-blivet3-3.1.3-3.el7.noarch providing python-blivet3 is already installed", "libblockdev-crypto-2.18-5.el7.x86_64 providing libblockdev-crypto is already installed", "libblockdev-dm-2.18-5.el7.x86_64 providing libblockdev-dm is already installed", "libblockdev-lvm-2.18-5.el7.x86_64 providing libblockdev-lvm is already installed", "libblockdev-mdraid-2.18-5.el7.x86_64 providing libblockdev-mdraid is already installed", "libblockdev-swap-2.18-5.el7.x86_64 providing libblockdev-swap is already installed" ] } lsrpackages: libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python-blivet3 python-enum34 TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.490) 0:02:01.883 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo", "state": "absent" } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.015) 0:02:01.898 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' 
is undefined. 'storage_volumes' is undefined" } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Thursday 27 June 2024 03:24:06 +0000 (0:00:00.014) 0:02:01.913 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31 Thursday 27 June 2024 03:24:10 +0000 (0:00:04.197) 0:02:06.111 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for sut TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Thursday 27 June 2024 03:24:10 +0000 (0:00:00.023) 0:02:06.134 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Thursday 27 June 2024 03:24:10 +0000 (0:00:00.019) 0:02:06.153 ********* skipping: [sut] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Thursday 27 June 2024 03:24:10 +0000 (0:00:00.019) 0:02:06.173 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Thursday 27 June 2024 03:24:10 +0000 (0:00:00.018) 0:02:06.191 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [ "kpartx-0.4.9-136.el7_9.x86_64 providing kpartx is already installed" ] } lsrpackages: kpartx TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 Thursday 27 June 2024 03:24:11 +0000 (0:00:00.302) 0:02:06.494 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "brandbot.service": { "name": "brandbot.service", "source": "systemd", "state": "inactive", "status": "static" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-shell.service": { "name": "console-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.import1.service": { "name": "dbus-org.freedesktop.import1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.machine1.service": { "name": "dbus-org.freedesktop.machine1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { 
"name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "exim.service": { "name": "exim.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", 
"source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmetad.service": { "name": "lvm2-lvmetad.service", "source": "systemd", "state": "running", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "lvm2-pvscan@8:0.service": { "name": "lvm2-pvscan@8:0.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "netconsole": { "name": "netconsole", "source": "sysv", "state": "stopped", "status": "disabled" }, "network": { "name": "network", "source": "sysv", "state": "running", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "active" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-config.service": { "name": "nfs-config.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-idmap.service": { "name": "nfs-idmap.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-lock.service": { "name": "nfs-lock.service", "source": "systemd", "state": "inactive", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-rquotad.service": { "name": "nfs-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-secure-server.service": { "name": "nfs-secure-server.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-secure.service": { "name": "nfs-secure.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs.service": { "name": "nfs.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfslock.service": { "name": "nfslock.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "postfix.service": { "name": "postfix.service", "source": "systemd", "state": "running", "status": "enabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rhel-autorelabel-mark.service": { "name": "rhel-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-autorelabel.service": { "name": "rhel-autorelabel.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-configure.service": { "name": "rhel-configure.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-dmesg.service": { "name": "rhel-dmesg.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-domainname.service": { "name": "rhel-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-import-state.service": { "name": "rhel-import-state.service", "source": "systemd", "state": "stopped", "status": 
"enabled" }, "rhel-loadmodules.service": { "name": "rhel-loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rhel-readonly.service": { "name": "rhel-readonly.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-rquotad.service": { "name": "rpc-rquotad.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpcgssd.service": { "name": "rpcgssd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rpcidmapd.service": { "name": "rpcidmapd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rsyncd.service": { "name": "rsyncd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyncd@.service": { "name": "rsyncd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-policy-migrate-local-changes@.service": { "name": "selinux-policy-migrate-local-changes@.service", "source": "systemd", "state": "unknown", "status": "static" }, "selinux-policy-migrate-local-changes@targeted.service": { "name": "selinux-policy-migrate-local-changes@targeted.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sendmail.service": { "name": "sendmail.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "static" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bootchart.service": 
{ "name": "systemd-bootchart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-importd.service": { "name": "systemd-importd.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-machined.service": { "name": "systemd-machined.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-nspawn@.service": { "name": "systemd-nspawn@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-readahead-collect.service": { "name": "systemd-readahead-collect.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-readahead-done.service": { "name": "systemd-readahead-done.service", "source": "systemd", "state": 
"stopped", "status": "indirect" }, "systemd-readahead-drop.service": { "name": "systemd-readahead-drop.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "systemd-readahead-replay.service": { "name": "systemd-readahead-replay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill@.service": { "name": "systemd-rfkill@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-shutdownd.service": { "name": "systemd-shutdownd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "wpa_supplicant.service": { "name": "wpa_supplicant.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57 Thursday 27 June 2024 03:24:11 +0000 (0:00:00.766) 0:02:07.261 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63 Thursday 27 June 2024 03:24:12 +0000 (0:00:00.032) 0:02:07.293 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 Thursday 27 June 2024 03:24:12 +0000 (0:00:00.010) 0:02:07.303 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" }, { "action": "destroy device", "device": "/dev/mapper/foo-test2", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext4", "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83 Thursday 27 June 2024 03:24:16 +0000 (0:00:04.850) 0:02:12.153 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95 Thursday 27 June 2024 03:24:16 +0000 (0:00:00.020) 0:02:12.174 ********* skipping: 
[sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:101 Thursday 27 June 2024 03:24:16 +0000 (0:00:00.010) 0:02:12.184 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test2", "fs_type": "ext4" }, { "action": "destroy device", "device": "/dev/mapper/foo-test2", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1" ], "mounts": [ { "fstype": "ext4", "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "absent" } ], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Thursday 27 June 2024 03:24:16 +0000 (0:00:00.015) 0:02:12.199 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:114 Thursday 27 June 2024 03:24:16 +0000 (0:00:00.014) 0:02:12.214 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Thursday 27 June 2024 03:24:16 +0000 (0:00:00.013) 0:02:12.227 ********* redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [sut] => (item={'src': '/dev/mapper/foo-test2', 'state': 'absent', 'fstype': 'ext4', 'path': '/opt/test2'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", 
"fstab": "/etc/fstab", "fstype": "ext4", "mount_info": { "fstype": "ext4", "path": "/opt/test2", "src": "/dev/mapper/foo-test2", "state": "absent" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/foo-test2" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:141 Thursday 27 June 2024 03:24:17 +0000 (0:00:00.154) 0:02:12.382 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:146 Thursday 27 June 2024 03:24:17 +0000 (0:00:00.259) 0:02:12.641 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:157 Thursday 27 June 2024 03:24:17 +0000 (0:00:00.023) 0:02:12.664 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:169 Thursday 27 June 2024 03:24:17 +0000 (0:00:00.021) 0:02:12.686 ********* ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Thursday 27 June 2024 03:24:17 +0000 (0:00:00.242) 0:02:12.929 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1719457592.7553205, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1718879272.062, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131079, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1718879026.308, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "18446744072852913879", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:182 Thursday 27 June 2024 03:24:17 +0000 (0:00:00.129) 0:02:13.058 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:204 Thursday 27 June 2024 03:24:17 
+0000 (0:00:00.011) 0:02:13.070 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:160 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.602) 0:02:13.673 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:2 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.030) 0:02:13.703 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:7 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.022) 0:02:13.726 ********* skipping: [sut] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:15 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.018) 0:02:13.745 ********* ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "ext4", "label": "", "mountpoint": "/", "name": "/dev/xvda1", "size": "250G", "type": "partition", "uuid": "c7b7d6a5-fd01-4b9b-bcca-153eaff9d312" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:20 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.129) 0:02:13.875 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003082", "end": "2024-06-27 03:24:18.718470", "rc": 0, "start": "2024-06-27 03:24:18.715388" } 
STDOUT: # # /etc/fstab # Created by anaconda on Thu Jun 20 10:23:46 2024 # # Accessible filesystems, by reference, are maintained under '/dev/disk' # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info # UUID=c7b7d6a5-fd01-4b9b-bcca-153eaff9d312 / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:25 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.122) 0:02:13.998 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003079", "end": "2024-06-27 03:24:18.843389", "failed_when_result": false, "rc": 0, "start": "2024-06-27 03:24:18.840310" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:34 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.124) 0:02:14.122 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml for sut => (item={'grow_to_fill': False, 'name': 'foo', 'encryption_password': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_key_size': None, 'disks': ['sda'], 'encryption_key': None, 'encryption_luks_version': None, 'raid_device_count': None, 'raid_spare_count': None, 'state': 'absent', 'volumes': [], 'encryption_tang_url': None, 'shared': False, 'raid_level': None, 'encryption_clevis_pin': None, 'type': 'lvm', 'encryption_cipher': None, 'encryption_tang_thumbprint': None, 'raid_chunk_size': None}) TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:5 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.126) 0:02:14.249 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:18 Thursday 27 June 2024 03:24:18 +0000 (0:00:00.012) 0:02:14.262 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:24 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.273 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", 
"skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool.yml:34 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.284 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml for sut => (item=members) included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml for sut => (item=volumes) TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:2 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.024) 0:02:14.309 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:8 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.027) 0:02:14.336 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:17 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.347 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:22 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.021) 0:02:14.368 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:27 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.023) 0:02:14.392 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:36 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.024) 0:02:14.416 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:41 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.014) 0:02:14.430 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:46 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.022) 0:02:14.453 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:51 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.464 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task 
path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:64 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.009) 0:02:14.473 ********* ok: [sut] => { "changed": false, "rc": 0 } STDOUT: False STDERR: Shared connection to 10.31.8.226 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:73 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.160) 0:02:14.634 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:83 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.020) 0:02:14.655 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:8 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.022) 0:02:14.677 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:14 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.688 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:19 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.699 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:24 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.710 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:29 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.012) 0:02:14.723 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:37 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.012) 0:02:14.735 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:46 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.747 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task 
path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:55 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.758 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:64 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.769 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:74 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.780 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-md.yml:83 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.012) 0:02:14.793 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:86 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:14.804 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-lvmraid.yml:2 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.022) 0:02:14.827 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:89 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.837 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-thin.yml:2 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.023) 0:02:14.861 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:92 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.871 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:5 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.024) 0:02:14.895 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: 
/WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:10 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.021) 0:02:14.917 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:17 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.009) 0:02:14.926 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-encryption.yml:24 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.937 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:95 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.012) 0:02:14.949 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-members-vdo.yml:2 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.025) 0:02:14.974 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Stratis] *********************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:98 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:14.984 ********* included: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml for sut TASK [Run 'stratis report'] **************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:6 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.027) 0:02:15.012 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Get information about Stratis] ******************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:11 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:15.023 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that the pools was created] *************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:15 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:15.034 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:25 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.010) 0:02:15.045 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:34 Thursday 27 June 2024 03:24:19 
+0000 (0:00:00.010) 0:02:15.055 ********* skipping: [sut] => { "changed": false, "false_condition": "storage_test_pool.type == 'stratis'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-pool-stratis.yml:44 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.012) 0:02:15.067 ********* ok: [sut] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-members.yml:101 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:15.079 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-pool-volumes.yml:3 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.011) 0:02:15.090 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:44 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.009) 0:02:15.099 ********* skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-weekly-ciy6hs_iyw/tests/verify-role-results.yml:54 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.008) 0:02:15.108 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } PLAY RECAP ********************************************************************* sut : ok=812 changed=12 unreachable=0 failed=0 skipped=868 rescued=2 ignored=0 Thursday 27 June 2024 03:24:19 +0000 (0:00:00.021) 0:02:15.130 ********* =============================================================================== fedora.linux_system_roles.storage : Make sure blivet is available ------ 10.10s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.14s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.05s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.89s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.87s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.85s 
/WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.65s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Get required packages --------------- 4.31s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Get required packages --------------- 4.26s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Get required packages --------------- 4.20s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Get required packages --------------- 4.20s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Get required packages --------------- 4.10s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.03s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 fedora.linux_system_roles.storage : Get required packages --------------- 3.82s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Get required packages --------------- 3.74s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Ensure cryptsetup is present -------------------------------------------- 1.70s /WORKDIR/git-weekly-ciy6hs_iyw/tests/test-verify-volume-encryption.yml:10 ----- Gathering Facts --------------------------------------------------------- 0.97s /WORKDIR/git-weekly-ciy6hs_iyw/tests/tests_lvm_percent_size.yml:2 ------------- fedora.linux_system_roles.storage : Get service facts ------------------- 0.90s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 fedora.linux_system_roles.storage : Get service facts ------------------- 0.79s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 fedora.linux_system_roles.storage : Get service facts ------------------- 0.77s /WORKDIR/git-weekly-ciy6hs_iyw/.collection/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 ---^---^---^---^---^--- # STDERR: ---v---v---v---v---v--- [DEPRECATION WARNING]: ANSIBLE_COLLECTIONS_PATHS option, does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead. This feature will be removed from ansible-core in version 2.19. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. ---^---^---^---^---^---
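For readers reconstructing what the final "Verify role results" pass above actually confirmed, the following is a minimal, purely illustrative sketch of a role invocation that would leave the system in the verified state. The pool name, member disk, and `state: absent` are taken from the `_storage_pools_list` facts printed in the log; the play structure, host pattern, and task names are assumptions and are not copied from tests_lvm_percent_size.yml.

```yaml
# Hypothetical sketch only: reconstructed from the pool facts printed above
# ("name": "foo", "disks": ["sda"], "state": "absent"); the play layout and
# host pattern are assumptions, not taken from the test sources.
- name: Remove the test pool (illustrative sketch)
  hosts: all
  tasks:
    - name: Run the storage role with the pool marked absent
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo        # VG name verified above in _storage_pools_list
            disks:
              - sda          # member disk reported for the pool
            state: absent    # removal is what verify-role-results.yml checks
```

Note that the deprecation warning in the STDERR block concerns the `ANSIBLE_COLLECTIONS_PATHS` configuration option only and does not affect the PASS/FAIL outcome recorded in the PLAY RECAP; as the warning itself states, it is resolved by switching to the singular `ANSIBLE_COLLECTIONS_PATH` form before ansible-core 2.19 removes the old name.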