# STDOUT:
---v---v---v---v---v---
ansible-playbook [core 2.16.0]
config file = /etc/ansible/ansible.cfg
configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /opt/ansible-2.16/lib/python3.11/site-packages/ansible
ansible collection location = /WORKDIR/git-weekly-ci1ebenttp/.collection
executable location = /opt/ansible-2.16/bin/ansible-playbook
python version = 3.11.5 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/opt/ansible-2.16/bin/python)
jinja version = 3.1.2
libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: tests_cib_utilization.yml ********************************************
2 plays in /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml
PLAY [all] *********************************************************************
TASK [Include vault variables] *************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:4
Saturday 25 May 2024 08:56:53 +0000 (0:00:00.012) 0:00:00.012 **********
ok: [sut] => {
"ansible_facts": {
"ha_cluster_hacluster_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n31303833633366333561656439323930303361333161363239346166656537323933313436\n3432386236656563343237306335323637396239616230353561330a313731623238393238\n62343064666336643930663239383936616465643134646536656532323461356237646133\n3761616633323839633232353637366266350a313163633236376666653238633435306565\n3264623032333736393535663833\n"
}
},
"ansible_included_var_files": [
"/WORKDIR/git-weekly-ci1ebenttp/tests/vars/vault-variables.yml"
],
"changed": false
}
PLAY [Configure utilization] ***************************************************
TASK [Gathering Facts] *********************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:9
Saturday 25 May 2024 08:56:53 +0000 (0:00:00.013) 0:00:00.026 **********
ok: [sut]
TASK [Set up test environment] *************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:17
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.861) 0:00:00.888 **********
TASK [fedora.linux_system_roles.ha_cluster : Set node name to 'localhost' for single-node clusters] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:9
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.020) 0:00:00.908 **********
ok: [sut] => {
"ansible_facts": {
"inventory_hostname": "localhost"
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Ensure facts used by tests] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:14
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.019) 0:00:00.927 **********
skipping: [sut] => {
"changed": false,
"false_condition": "'distribution' not in ansible_facts",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:22
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.008) 0:00:00.935 **********
ok: [sut] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:27
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.246) 0:00:01.182 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Do not try to enable RHEL repositories] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:32
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.013) 0:00:01.196 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ansible_distribution == 'RedHat'",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Copy nss-altfiles ha_cluster users to /etc/passwd] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:41
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.008) 0:00:01.205 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_is_ostree | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [Find first node name] ****************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:22
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.018) 0:00:01.223 **********
ok: [sut] => {
"ansible_facts": {
"__test_first_node": "localhost"
},
"changed": false
}
TASK [Run HA Cluster role] *****************************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:26
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.019) 0:00:01.242 **********
TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:3
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.029) 0:00:01.271 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Ensure ansible_facts used by role] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:2
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.014) 0:00:01.285 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:10
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.018) 0:00:01.303 **********
skipping: [sut] => {
"changed": false,
"false_condition": "not __ha_cluster_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:15
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.011) 0:00:01.315 **********
skipping: [sut] => {
"changed": false,
"false_condition": "not __ha_cluster_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:19
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.011) 0:00:01.326 **********
ok: [sut] => (item=RedHat.yml) => {
"ansible_facts": {
"__ha_cluster_cloud_agents_packages": [],
"__ha_cluster_fence_agent_packages_default": "{{ ['fence-agents-all'] + (['fence-virt'] if ansible_architecture == 'x86_64' else []) }}",
"__ha_cluster_fullstack_node_packages": [
"corosync",
"libknet1-plugins-all",
"resource-agents",
"pacemaker",
"openssl"
],
"__ha_cluster_pcs_provider": "pcs-0.10",
"__ha_cluster_qdevice_node_packages": [
"corosync-qdevice",
"bash",
"coreutils",
"curl",
"grep",
"nss-tools",
"openssl",
"sed"
],
"__ha_cluster_repos": [],
"__ha_cluster_role_essential_packages": [
"pcs",
"corosync-qnetd"
],
"__ha_cluster_sbd_packages": [
"sbd"
],
"__ha_cluster_services": [
"corosync",
"corosync-qdevice",
"pacemaker"
]
},
"ansible_included_var_files": [
"/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [sut] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [sut] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__ha_cluster_cloud_agents_packages": [
"resource-agents-aliyun",
"resource-agents-gcp",
"fence-agents-aliyun",
"fence-agents-aws",
"fence-agents-azure-arm",
"fence-agents-gce"
],
"__ha_cluster_repos": [
{
"id": "ha",
"name": "HighAvailability"
},
{
"id": "resilientstorage",
"name": "ResilientStorage"
}
]
},
"ansible_included_var_files": [
"/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
ok: [sut] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__ha_cluster_cloud_agents_packages": [
"resource-agents-aliyun",
"resource-agents-gcp",
"fence-agents-aliyun",
"fence-agents-aws",
"fence-agents-azure-arm",
"fence-agents-gce"
],
"__ha_cluster_repos": [
{
"id": "ha",
"name": "HighAvailability"
},
{
"id": "resilientstorage",
"name": "ResilientStorage"
}
]
},
"ansible_included_var_files": [
"/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
TASK [fedora.linux_system_roles.ha_cluster : Set Linux Pacemaker shell specific variables] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:34
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.028) 0:00:01.354 **********
ok: [sut] => {
"ansible_facts": {},
"ansible_included_var_files": [
"/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/shell_pcs.yml"
],
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Enable package repositories] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:6
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.011) 0:00:01.366 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Find platform/version specific tasks to enable repositories] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:3
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.014) 0:00:01.380 **********
ok: [sut] => (item=RedHat.yml) => {
"ansible_facts": {
"__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/RedHat.yml"
},
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
ok: [sut] => (item=CentOS.yml) => {
"ansible_facts": {
"__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml"
},
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS.yml"
}
skipping: [sut] => (item=CentOS_8.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file",
"item": "CentOS_8.yml",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => (item=CentOS_8.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file",
"item": "CentOS_8.yml",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Run platform/version specific tasks to enable repositories] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:21
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.026) 0:00:01.406 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : List active CentOS repositories] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml:3
Saturday 25 May 2024 08:56:54 +0000 (0:00:00.025) 0:00:01.432 **********
ok: [sut] => {
"changed": false,
"cmd": [
"dnf",
"repolist"
],
"delta": "0:00:00.259472",
"end": "2024-05-25 08:56:55.180064",
"rc": 0,
"start": "2024-05-25 08:56:54.920592"
}
STDOUT:
repo id repo name
appstream CentOS Stream 8 - AppStream
baseos CentOS Stream 8 - BaseOS
beaker-client Beaker Client - RedHatEnterpriseLinux8
beaker-harness Beaker harness
beaker-tasks Beaker tasks
beakerlib-libraries Copr repo for beakerlib-libraries owned by bgoncalv
copr:copr.devel.redhat.com:lpol:qa-tools Copr repo for qa-tools owned by lpol
extras CentOS Stream 8 - Extras
extras-common CentOS Stream 8 - Extras common packages
ha CentOS Stream 8 - HighAvailability
TASK [fedora.linux_system_roles.ha_cluster : Enable CentOS repositories] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml:10
Saturday 25 May 2024 08:56:55 +0000 (0:00:00.507) 0:00:01.940 **********
skipping: [sut] => (item={'id': 'ha', 'name': 'HighAvailability'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "item.id not in __ha_cluster_repolist.stdout",
"item": {
"id": "ha",
"name": "HighAvailability"
},
"skip_reason": "Conditional result was False"
}
skipping: [sut] => (item={'id': 'resilientstorage', 'name': 'ResilientStorage'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "item.name != \"ResilientStorage\" or ha_cluster_enable_repos_resilient_storage",
"item": {
"id": "resilientstorage",
"name": "ResilientStorage"
},
"skip_reason": "Conditional result was False"
}
skipping: [sut] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.ha_cluster : Install role essential packages] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11
Saturday 25 May 2024 08:56:55 +0000 (0:00:00.012) 0:00:01.953 **********
changed: [sut] => {
"changed": true,
"rc": 0,
"results": [
"Installed: libaio-0.3.112-1.el8.x86_64",
"Installed: rubygem-json-2.1.0-111.module_el8+475+35a6c697.x86_64",
"Installed: bzip2-1.0.6-26.el8.x86_64",
"Installed: corosync-3.1.8-1.el8.x86_64",
"Installed: libqb-1.0.3-13.el8.x86_64",
"Installed: corosync-qnetd-3.0.2-2.el8.x86_64",
"Installed: samba-client-libs-4.19.4-4.el8.x86_64",
"Installed: samba-common-4.19.4-4.el8.noarch",
"Installed: device-mapper-event-8:1.02.181-14.el8.x86_64",
"Installed: libicu-60.3-2.el8_1.x86_64",
"Installed: device-mapper-event-libs-8:1.02.181-14.el8.x86_64",
"Installed: samba-common-libs-4.19.4-4.el8.x86_64",
"Installed: pcs-0.10.18-2.el8.x86_64",
"Installed: ruby-irb-2.5.9-111.module_el8+475+35a6c697.noarch",
"Installed: python3-clufter-0.77.1-5.el8.noarch",
"Installed: lvm2-8:2.03.14-14.el8.x86_64",
"Installed: ruby-2.5.9-111.module_el8+475+35a6c697.x86_64",
"Installed: rubygem-psych-3.0.2-111.module_el8+475+35a6c697.x86_64",
"Installed: ruby-libs-2.5.9-111.module_el8+475+35a6c697.x86_64",
"Installed: cifs-utils-7.0-1.el8.x86_64",
"Installed: libknet1-1.28-1.el8.x86_64",
"Installed: lvm2-libs-8:2.03.14-14.el8.x86_64",
"Installed: rubygems-2.7.6.3-111.module_el8+475+35a6c697.noarch",
"Installed: pacemaker-2.1.7-5.el8.x86_64",
"Installed: resource-agents-4.9.0-54.el8.x86_64",
"Installed: rubygem-did_you_mean-1.2.0-111.module_el8+475+35a6c697.noarch",
"Installed: libknet1-compress-bzip2-plugin-1.28-1.el8.x86_64",
"Installed: device-mapper-persistent-data-0.9.0-7.el8.x86_64",
"Installed: libknet1-compress-lz4-plugin-1.28-1.el8.x86_64",
"Installed: libknet1-compress-lzma-plugin-1.28-1.el8.x86_64",
"Installed: rubygem-io-console-0.4.6-111.module_el8+475+35a6c697.x86_64",
"Installed: net-snmp-libs-1:5.8-30.el8.x86_64",
"Installed: net-tools-2.0-0.52.20160912git.el8.x86_64",
"Installed: python3-pycurl-7.43.0.2-4.el8.x86_64",
"Installed: libknet1-compress-lzo2-plugin-1.28-1.el8.x86_64",
"Installed: libknet1-compress-plugins-all-1.28-1.el8.x86_64",
"Installed: centos-logos-85.8-2.el8.x86_64",
"Installed: overpass-fonts-3.0.2-3.el8.noarch",
"Installed: libknet1-compress-zlib-plugin-1.28-1.el8.x86_64",
"Installed: libknet1-crypto-nss-plugin-1.28-1.el8.x86_64",
"Installed: libwbclient-4.19.4-4.el8.x86_64",
"Installed: libknet1-crypto-openssl-plugin-1.28-1.el8.x86_64",
"Installed: python3-pyparsing-2.1.10-7.el8.noarch",
"Installed: pacemaker-cluster-libs-2.1.7-5.el8.x86_64",
"Installed: libknet1-crypto-plugins-all-1.28-1.el8.x86_64",
"Installed: fontpackages-filesystem-1.44-22.el8.noarch",
"Installed: libknet1-plugins-all-1.28-1.el8.x86_64",
"Installed: pacemaker-libs-2.1.7-5.el8.x86_64",
"Installed: perl-TimeDate-1:2.30-15.module_el8+336+32327ac4.noarch",
"Installed: pacemaker-schemas-2.1.7-5.el8.noarch",
"Installed: liberation-fonts-common-1:2.00.3-7.el8.noarch",
"Installed: rubygem-openssl-2.1.2-111.module_el8+475+35a6c697.x86_64",
"Installed: nss-tools-3.90.0-7.el8.x86_64",
"Installed: rubygem-bigdecimal-1.3.4-111.module_el8+475+35a6c697.x86_64",
"Installed: rubygem-rdoc-6.0.1.1-111.module_el8+475+35a6c697.noarch",
"Installed: liberation-sans-fonts-1:2.00.3-7.el8.noarch",
"Installed: libnozzle1-1.28-1.el8.x86_64",
"Installed: corosynclib-3.1.8-1.el8.x86_64",
"Installed: clufter-bin-0.77.1-5.el8.x86_64",
"Installed: clufter-common-0.77.1-5.el8.noarch",
"Installed: pacemaker-cli-2.1.7-5.el8.x86_64"
]
}
lsrpackages: corosync-qnetd pcs
TASK [fedora.linux_system_roles.ha_cluster : Check and prepare role variables] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:17
Saturday 25 May 2024 08:57:16 +0000 (0:00:21.021) 0:00:22.975 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Discover cluster node names] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:3
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.022) 0:00:22.997 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_node_name": "localhost"
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Collect cluster node names] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:7
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.019) 0:00:23.016 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_all_node_names": [
"localhost"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_node_options contains unknown or duplicate nodes] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:16
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.021) 0:00:23.037 **********
skipping: [sut] => {
"changed": false,
"false_condition": "(\n __nodes_from_options != (__nodes_from_options | unique)\n) or (\n __nodes_from_options | difference(__ha_cluster_all_node_names)\n)\n",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Extract node options] *************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:30
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.019) 0:00:23.056 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_local_node": {
"node_name": "localhost",
"utilization": [
{
"attrs": [
{
"name": "cpu",
"value": 2
},
{
"name": "memory",
"value": 4096
}
]
}
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:43
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.023) 0:00:23.080 **********
skipping: [sut] => (item=ha_cluster_hacluster_password) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "lookup(\"vars\", item, default=\"\") | string | length < 1",
"item": "ha_cluster_hacluster_password",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.ha_cluster : Fail if nodes do not have the same number of SBD devices specified] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:53
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.028) 0:00:23.108 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_sbd_enabled",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if configuring qnetd on a cluster node] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:69
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.009) 0:00:23.117 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_qnetd.present | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if no valid level is specified for a fencing level] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:79
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.017) 0:00:23.135 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if no target is specified for a fencing level] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:87
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.008) 0:00:23.143 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Extract qdevice settings] *********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:101
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.007) 0:00:23.151 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_qdevice_host": "",
"__ha_cluster_qdevice_in_use": false,
"__ha_cluster_qdevice_model": "",
"__ha_cluster_qdevice_pcs_address": ""
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Figure out if ATB needs to be enabled for SBD] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:110
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.025) 0:00:23.177 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_sbd_needs_atb": false
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if SBD needs ATB enabled and the user configured ATB to be disabled] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:120
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.023) 0:00:23.200 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_sbd_needs_atb | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_pcsd_public_key_src and ha_cluster_pcsd_private_key_src are set along with ha_cluster_pcsd_certificates] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:127
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.016) 0:00:23.216 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_pcsd_public_key_src is not none",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141
Saturday 25 May 2024 08:57:16 +0000 (0:00:00.013) 0:00:23.230 **********
ok: [sut] => {
"changed": false,
"cmd": [
"pcs",
"--version",
"--full"
],
"delta": "0:00:00.991325",
"end": "2024-05-25 08:57:17.743458",
"rc": 0,
"start": "2024-05-25 08:57:16.752133"
}
STDOUT:
0.10.18
booth booth.enable-authfile.set booth.enable-authfile.unset cluster.config.backup-local cluster.config.restore-cluster cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.local cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all cluster.report cluster.verify corosync.authkey.update corosync.config.get corosync.config.get.struct corosync.config.reload corosync.config.sync-to-local-cluster corosync.config.update corosync.link.add corosync.link.remove corosync.link.remove.list corosync.link.update corosync.qdevice corosync.qdevice.model.net corosync.quorum corosync.quorum.device corosync.quorum.device.heuristics corosync.quorum.device.model.net corosync.quorum.device.model.net.options_tls_and_kaptb corosync.quorum.set-expected-votes-runtime corosync.quorum.status corosync.quorum.unblock corosync.totem.block_unlisted_ips corosync.uidgid node.add node.add.enable node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.attributes.set-list-for-node node.confirm-off node.fence node.guest node.kill node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remote node.remote.onfail-demote node.remove node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.list node.start-stop-enable-disable.start-wait node.utilization node.utilization.set-list-for-node pcmk.acl.enable-disable pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.user pcmk.alert pcmk.cib.checkpoints pcmk.cib.checkpoints.diff pcmk.cib.edit pcmk.cib.get pcmk.cib.get.scope pcmk.cib.roles.promoted-unpromoted pcmk.cib.set pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.colocation.simple.options pcmk.constraint.hide-expired pcmk.constraint.location.simple pcmk.constraint.location.simple.options pcmk.constraint.location.simple.resource-regexp pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.location.simple.rule.options pcmk.constraint.location.simple.rule.rule-add-remove pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.order.simple.options pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster pcmk.properties.cluster.config.output-formats pcmk.properties.cluster.defaults pcmk.properties.cluster.describe pcmk.properties.cluster.describe.output-formats pcmk.properties.operation-defaults pcmk.properties.operation-defaults.multiple pcmk.properties.operation-defaults.rule pcmk.properties.operation-defaults.rule-rsc-op pcmk.properties.operation-defaults.rule.hide-expired pcmk.properties.operation-defaults.rule.node-attr-type-number pcmk.properties.resource-defaults pcmk.properties.resource-defaults.multiple pcmk.properties.resource-defaults.rule pcmk.properties.resource-defaults.rule-rsc-op pcmk.properties.resource-defaults.rule.hide-expired pcmk.properties.resource-defaults.rule.node-attr-type-number 
pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.bundles pcmk.resource.ban-move-clear.clear-expired pcmk.resource.ban-move-clear.clone pcmk.resource.bundle pcmk.resource.bundle.container-docker pcmk.resource.bundle.container-docker.promoted-max pcmk.resource.bundle.container-podman pcmk.resource.bundle.container-podman.promoted-max pcmk.resource.bundle.container-rkt pcmk.resource.bundle.container-rkt.promoted-max pcmk.resource.bundle.reset pcmk.resource.bundle.wait pcmk.resource.cleanup pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.clone.custom-id pcmk.resource.clone.meta-in-create pcmk.resource.clone.wait pcmk.resource.config.output-formats pcmk.resource.create pcmk.resource.create.clone.custom-id pcmk.resource.create.in-existing-bundle pcmk.resource.create.meta pcmk.resource.create.no-master pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.debug pcmk.resource.delete pcmk.resource.disable.safe pcmk.resource.disable.safe.brief pcmk.resource.disable.safe.tag pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.tag pcmk.resource.enable-disable.wait pcmk.resource.failcount pcmk.resource.group pcmk.resource.group.add-remove-list pcmk.resource.group.wait pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.promotable pcmk.resource.promotable.custom-id pcmk.resource.promotable.meta-in-create pcmk.resource.promotable.wait pcmk.resource.refresh pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.relations pcmk.resource.relocate pcmk.resource.restart pcmk.resource.update pcmk.resource.update-meta pcmk.resource.update-meta.list pcmk.resource.update-meta.wait pcmk.resource.update-operations pcmk.resource.update-operations.onfail-demote pcmk.resource.update.meta pcmk.resource.update.operations pcmk.resource.update.operations.onfail-demote pcmk.resource.update.wait pcmk.resource.utilization pcmk.resource.utilization-set-list-for-resource pcmk.stonith.cleanup pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta pcmk.stonith.create.operations pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.history.cleanup pcmk.stonith.history.show pcmk.stonith.history.update pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.update pcmk.stonith.update.scsi-devices pcmk.stonith.update.scsi-devices.add-remove pcmk.stonith.update.scsi-devices.mpath pcmk.tag pcmk.tag.resources pcs.auth.client pcs.auth.client.cluster pcs.auth.client.token pcs.auth.deauth-client pcs.auth.deauth-server pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server.token pcs.cfg-in-file.cib pcs.daemon-ssl-cert.set pcs.daemon-ssl-cert.sync-to-local-cluster pcs.disaster-recovery.essentials 
pcs.request-timeout resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 resource-agents.ocf.version-1-1 resource-agents.self-validation sbd sbd.option-timeout-action sbd.shared-block-device status.corosync.membership status.pcmk.resources.hide-inactive status.pcmk.resources.id status.pcmk.resources.node status.pcmk.resources.orphaned status.pcmk.xml stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation
TASK [fedora.linux_system_roles.ha_cluster : Parse pcs capabilities] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:148
Saturday 25 May 2024 08:57:17 +0000 (0:00:01.273) 0:00:24.503 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_pcs_capabilities": [
"booth",
"booth.enable-authfile.set",
"booth.enable-authfile.unset",
"cluster.config.backup-local",
"cluster.config.restore-cluster",
"cluster.config.restore-local",
"cluster.config.uuid",
"cluster.create",
"cluster.create.enable",
"cluster.create.local",
"cluster.create.no-keys-sync",
"cluster.create.separated-name-and-address",
"cluster.create.start",
"cluster.create.start.wait",
"cluster.create.transport.knet",
"cluster.create.transport.udp-udpu",
"cluster.create.transport.udp-udpu.no-rrp",
"cluster.destroy",
"cluster.destroy.all",
"cluster.report",
"cluster.verify",
"corosync.authkey.update",
"corosync.config.get",
"corosync.config.get.struct",
"corosync.config.reload",
"corosync.config.sync-to-local-cluster",
"corosync.config.update",
"corosync.link.add",
"corosync.link.remove",
"corosync.link.remove.list",
"corosync.link.update",
"corosync.qdevice",
"corosync.qdevice.model.net",
"corosync.quorum",
"corosync.quorum.device",
"corosync.quorum.device.heuristics",
"corosync.quorum.device.model.net",
"corosync.quorum.device.model.net.options_tls_and_kaptb",
"corosync.quorum.set-expected-votes-runtime",
"corosync.quorum.status",
"corosync.quorum.unblock",
"corosync.totem.block_unlisted_ips",
"corosync.uidgid",
"node.add",
"node.add.enable",
"node.add.separated-name-and-address",
"node.add.start",
"node.add.start.wait",
"node.attributes",
"node.attributes.set-list-for-node",
"node.confirm-off",
"node.fence",
"node.guest",
"node.kill",
"node.maintenance",
"node.maintenance.all",
"node.maintenance.list",
"node.maintenance.wait",
"node.remote",
"node.remote.onfail-demote",
"node.remove",
"node.remove-from-caches",
"node.remove.list",
"node.standby",
"node.standby.all",
"node.standby.list",
"node.standby.wait",
"node.start-stop-enable-disable",
"node.start-stop-enable-disable.all",
"node.start-stop-enable-disable.list",
"node.start-stop-enable-disable.start-wait",
"node.utilization",
"node.utilization.set-list-for-node",
"pcmk.acl.enable-disable",
"pcmk.acl.group",
"pcmk.acl.role",
"pcmk.acl.role.create-with-permissions",
"pcmk.acl.role.delete-with-users-groups",
"pcmk.acl.user",
"pcmk.alert",
"pcmk.cib.checkpoints",
"pcmk.cib.checkpoints.diff",
"pcmk.cib.edit",
"pcmk.cib.get",
"pcmk.cib.get.scope",
"pcmk.cib.roles.promoted-unpromoted",
"pcmk.cib.set",
"pcmk.constraint.colocation.set",
"pcmk.constraint.colocation.set.options",
"pcmk.constraint.colocation.simple",
"pcmk.constraint.colocation.simple.options",
"pcmk.constraint.hide-expired",
"pcmk.constraint.location.simple",
"pcmk.constraint.location.simple.options",
"pcmk.constraint.location.simple.resource-regexp",
"pcmk.constraint.location.simple.rule",
"pcmk.constraint.location.simple.rule.node-attr-type-number",
"pcmk.constraint.location.simple.rule.options",
"pcmk.constraint.location.simple.rule.rule-add-remove",
"pcmk.constraint.no-autocorrect",
"pcmk.constraint.order.set",
"pcmk.constraint.order.set.options",
"pcmk.constraint.order.simple",
"pcmk.constraint.order.simple.options",
"pcmk.constraint.ticket.set",
"pcmk.constraint.ticket.set.options",
"pcmk.constraint.ticket.simple",
"pcmk.constraint.ticket.simple.constraint-id",
"pcmk.properties.cluster",
"pcmk.properties.cluster.config.output-formats",
"pcmk.properties.cluster.defaults",
"pcmk.properties.cluster.describe",
"pcmk.properties.cluster.describe.output-formats",
"pcmk.properties.operation-defaults",
"pcmk.properties.operation-defaults.multiple",
"pcmk.properties.operation-defaults.rule",
"pcmk.properties.operation-defaults.rule-rsc-op",
"pcmk.properties.operation-defaults.rule.hide-expired",
"pcmk.properties.operation-defaults.rule.node-attr-type-number",
"pcmk.properties.resource-defaults",
"pcmk.properties.resource-defaults.multiple",
"pcmk.properties.resource-defaults.rule",
"pcmk.properties.resource-defaults.rule-rsc-op",
"pcmk.properties.resource-defaults.rule.hide-expired",
"pcmk.properties.resource-defaults.rule.node-attr-type-number",
"pcmk.resource.ban-move-clear",
"pcmk.resource.ban-move-clear.bundles",
"pcmk.resource.ban-move-clear.clear-expired",
"pcmk.resource.ban-move-clear.clone",
"pcmk.resource.bundle",
"pcmk.resource.bundle.container-docker",
"pcmk.resource.bundle.container-docker.promoted-max",
"pcmk.resource.bundle.container-podman",
"pcmk.resource.bundle.container-podman.promoted-max",
"pcmk.resource.bundle.container-rkt",
"pcmk.resource.bundle.container-rkt.promoted-max",
"pcmk.resource.bundle.reset",
"pcmk.resource.bundle.wait",
"pcmk.resource.cleanup",
"pcmk.resource.cleanup.one-resource",
"pcmk.resource.cleanup.strict",
"pcmk.resource.clone",
"pcmk.resource.clone.custom-id",
"pcmk.resource.clone.meta-in-create",
"pcmk.resource.clone.wait",
"pcmk.resource.config.output-formats",
"pcmk.resource.create",
"pcmk.resource.create.clone.custom-id",
"pcmk.resource.create.in-existing-bundle",
"pcmk.resource.create.meta",
"pcmk.resource.create.no-master",
"pcmk.resource.create.operations",
"pcmk.resource.create.operations.onfail-demote",
"pcmk.resource.create.promotable",
"pcmk.resource.create.promotable.custom-id",
"pcmk.resource.create.wait",
"pcmk.resource.debug",
"pcmk.resource.delete",
"pcmk.resource.disable.safe",
"pcmk.resource.disable.safe.brief",
"pcmk.resource.disable.safe.tag",
"pcmk.resource.disable.simulate",
"pcmk.resource.disable.simulate.brief",
"pcmk.resource.disable.simulate.tag",
"pcmk.resource.enable-disable",
"pcmk.resource.enable-disable.list",
"pcmk.resource.enable-disable.tag",
"pcmk.resource.enable-disable.wait",
"pcmk.resource.failcount",
"pcmk.resource.group",
"pcmk.resource.group.add-remove-list",
"pcmk.resource.group.wait",
"pcmk.resource.manage-unmanage",
"pcmk.resource.manage-unmanage.list",
"pcmk.resource.manage-unmanage.tag",
"pcmk.resource.manage-unmanage.with-monitor",
"pcmk.resource.move.autoclean",
"pcmk.resource.promotable",
"pcmk.resource.promotable.custom-id",
"pcmk.resource.promotable.meta-in-create",
"pcmk.resource.promotable.wait",
"pcmk.resource.refresh",
"pcmk.resource.refresh.one-resource",
"pcmk.resource.refresh.strict",
"pcmk.resource.relations",
"pcmk.resource.relocate",
"pcmk.resource.restart",
"pcmk.resource.update",
"pcmk.resource.update-meta",
"pcmk.resource.update-meta.list",
"pcmk.resource.update-meta.wait",
"pcmk.resource.update-operations",
"pcmk.resource.update-operations.onfail-demote",
"pcmk.resource.update.meta",
"pcmk.resource.update.operations",
"pcmk.resource.update.operations.onfail-demote",
"pcmk.resource.update.wait",
"pcmk.resource.utilization",
"pcmk.resource.utilization-set-list-for-resource",
"pcmk.stonith.cleanup",
"pcmk.stonith.cleanup.one-resource",
"pcmk.stonith.cleanup.strict",
"pcmk.stonith.create",
"pcmk.stonith.create.in-group",
"pcmk.stonith.create.meta",
"pcmk.stonith.create.operations",
"pcmk.stonith.create.operations.onfail-demote",
"pcmk.stonith.create.wait",
"pcmk.stonith.delete",
"pcmk.stonith.enable-disable",
"pcmk.stonith.enable-disable.list",
"pcmk.stonith.enable-disable.wait",
"pcmk.stonith.history.cleanup",
"pcmk.stonith.history.show",
"pcmk.stonith.history.update",
"pcmk.stonith.levels",
"pcmk.stonith.levels.add-remove-devices-list",
"pcmk.stonith.levels.clear",
"pcmk.stonith.levels.node-attr",
"pcmk.stonith.levels.node-regexp",
"pcmk.stonith.levels.verify",
"pcmk.stonith.refresh",
"pcmk.stonith.refresh.one-resource",
"pcmk.stonith.refresh.strict",
"pcmk.stonith.update",
"pcmk.stonith.update.scsi-devices",
"pcmk.stonith.update.scsi-devices.add-remove",
"pcmk.stonith.update.scsi-devices.mpath",
"pcmk.tag",
"pcmk.tag.resources",
"pcs.auth.client",
"pcs.auth.client.cluster",
"pcs.auth.client.token",
"pcs.auth.deauth-client",
"pcs.auth.deauth-server",
"pcs.auth.no-bidirectional",
"pcs.auth.separated-name-and-address",
"pcs.auth.server.token",
"pcs.cfg-in-file.cib",
"pcs.daemon-ssl-cert.set",
"pcs.daemon-ssl-cert.sync-to-local-cluster",
"pcs.disaster-recovery.essentials",
"pcs.request-timeout",
"resource-agents.describe",
"resource-agents.list",
"resource-agents.list.detailed",
"resource-agents.ocf.version-1-0",
"resource-agents.ocf.version-1-1",
"resource-agents.self-validation",
"sbd",
"sbd.option-timeout-action",
"sbd.shared-block-device",
"status.corosync.membership",
"status.pcmk.resources.hide-inactive",
"status.pcmk.resources.id",
"status.pcmk.resources.node",
"status.pcmk.resources.orphaned",
"status.pcmk.xml",
"stonith-agents.describe",
"stonith-agents.list",
"stonith-agents.list.detailed",
"stonith-agents.ocf.version-1-0",
"stonith-agents.ocf.version-1-1",
"stonith-agents.self-validation"
],
"__ha_cluster_pcsd_capabilities_available": false
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fetch pcsd capabilities] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:155
Saturday 25 May 2024 08:57:17 +0000 (0:00:00.017) 0:00:24.521 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_pcsd_capabilities_available",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Parse pcsd capabilities] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:163
Saturday 25 May 2024 08:57:17 +0000 (0:00:00.008) 0:00:24.530 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_pcsd_capabilities": []
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fail if pcs is to old to configure resources and operations defaults] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:172
Saturday 25 May 2024 08:57:17 +0000 (0:00:00.019) 0:00:24.549 **********
skipping: [sut] => {
"changed": false,
"false_condition": "( ha_cluster_resource_defaults and not 'pcmk.properties.resource-defaults.multiple' in __ha_cluster_pcs_capabilities ) or ( ha_cluster_resource_operation_defaults and not 'pcmk.properties.operation-defaults.multiple' in __ha_cluster_pcs_capabilities )",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Set hacluster password] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:22
Saturday 25 May 2024 08:57:17 +0000 (0:00:00.009) 0:00:24.558 **********
changed: [sut] => {
"append": false,
"changed": true,
"comment": "cluster user",
"group": 189,
"home": "/home/hacluster",
"move_home": false,
"name": "hacluster",
"password": "NOT_LOGGING_PASSWORD",
"shell": "/sbin/nologin",
"state": "present",
"uid": 189
}
TASK [fedora.linux_system_roles.ha_cluster : Configure shell] ******************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:29
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.439) 0:00:24.998 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Stop pcsd] ************************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:6
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.024) 0:00:25.023 **********
ok: [sut] => {
"changed": false,
"name": "pcsd",
"state": "stopped",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice systemd-journald.socket pcsd-ruby.service sysinit.target basic.target network-online.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ConsistsOf": "pcsd-ruby.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "PCS GUI and remote configuration interface",
"DevicePolicy": "auto",
"Documentation": "man:pcsd(8) man:pcs(8)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/pcsd.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "pcsd.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "pcsd.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "pcsd-ruby.service sysinit.target network-online.target system.slice",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.ha_cluster : Regenerate pcsd TLS certificate and key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:11
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.498) 0:00:25.521 **********
skipping: [sut] => (item=/var/lib/pcsd/pcsd.key) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "ha_cluster_regenerate_keys",
"item": "/var/lib/pcsd/pcsd.key",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => (item=/var/lib/pcsd/pcsd.crt) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "ha_cluster_regenerate_keys",
"item": "/var/lib/pcsd/pcsd.crt",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.ha_cluster : Get the stat of /var/lib/pcsd] ****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:25
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.010) 0:00:25.532 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_pcsd_certificates | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Allow certmonger to write into pcsd's certificate directory] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:30
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.017) 0:00:25.550 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_pcsd_certificates | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [Ensure the name of ha_cluster_pcsd_certificates is /var/lib/pcsd/pcsd; Create certificates using the certificate role] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:37
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.018) 0:00:25.569 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_pcsd_certificates | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Set pcsd's certificate directory back to cluster_var_lib_t] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:49
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.018) 0:00:25.587 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_pcsd_certificates | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS private key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:64
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.018) 0:00:25.605 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_pcsd_public_key_src is string",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS certificate] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:71
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.021) 0:00:25.627 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_pcsd_public_key_src is string",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:79
Saturday 25 May 2024 08:57:18 +0000 (0:00:00.025) 0:00:25.652 **********
changed: [sut] => {
"changed": true,
"checksum": "9bbea634a798cf0976b80cd3c4e34aca6a6a7d44",
"dest": "/var/lib/pcsd/pcs_settings.conf",
"gid": 0,
"group": "root",
"md5sum": "4b74001d21d3867563d0c773bde32b42",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:cluster_var_lib_t:s0",
"size": 361,
"src": "/root/.ansible/tmp/ansible-tmp-1716627438.9481547-11774-24720070079491/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88
Saturday 25 May 2024 08:57:19 +0000 (0:00:00.618) 0:00:26.271 **********
changed: [sut] => {
"changed": true,
"enabled": true,
"name": "pcsd",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "systemd-journald.socket network-online.target sysinit.target basic.target system.slice pcsd-ruby.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ConsistsOf": "pcsd-ruby.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "PCS GUI and remote configuration interface",
"DevicePolicy": "auto",
"Documentation": "man:pcsd(8) man:pcs(8)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/pcsd.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "pcsd.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "pcsd.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "network-online.target sysinit.target system.slice pcsd-ruby.service",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
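The result above shows pcsd going from UnitFileState=disabled and ActiveState=inactive to enabled and started. Outside the role, the same end state can be reached with the systemd module; a minimal sketch:
- name: Start pcsd now and on boot (sketch)
  ansible.builtin.systemd:
    name: pcsd
    state: started
    enabled: true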
TASK [fedora.linux_system_roles.ha_cluster : Configure firewall] ***************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:35
Saturday 25 May 2024 08:57:21 +0000 (0:00:01.996) 0:00:28.268 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml for sut
TASK [Ensure the service and the ports status with the firewall role] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml:3
Saturday 25 May 2024 08:57:21 +0000 (0:00:00.021) 0:00:28.289 **********
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Saturday 25 May 2024 08:57:21 +0000 (0:00:00.035) 0:00:28.324 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for sut
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 25 May 2024 08:57:21 +0000 (0:00:00.025) 0:00:28.350 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 25 May 2024 08:57:21 +0000 (0:00:00.020) 0:00:28.370 **********
ok: [sut] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 25 May 2024 08:57:21 +0000 (0:00:00.199) 0:00:28.570 **********
ok: [sut] => {
"ansible_facts": {
"__firewall_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:19
Saturday 25 May 2024 08:57:21 +0000 (0:00:00.024) 0:00:28.595 **********
ok: [sut] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: firewalld
TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Saturday 25 May 2024 08:57:24 +0000 (0:00:02.432) 0:00:31.027 **********
skipping: [sut] => {
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Saturday 25 May 2024 08:57:24 +0000 (0:00:00.019) 0:00:31.047 **********
skipping: [sut] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Saturday 25 May 2024 08:57:24 +0000 (0:00:00.027) 0:00:31.074 **********
ok: [sut] => {
"changed": false,
"name": "firewalld",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "sysinit.target dbus.service dbus.socket system.slice basic.target polkit.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "network-pre.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ipset.service nftables.service shutdown.target ip6tables.service ebtables.service iptables.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
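This task only guarantees the unit is not masked and reports ok because firewalld was never masked here. A minimal sketch of an equivalent module call, assuming the systemd module's masked parameter is what the role relies on:
- name: Unmask firewalld (sketch)
  ansible.builtin.systemd:
    name: firewalld
    masked: false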
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Saturday 25 May 2024 08:57:24 +0000 (0:00:00.354) 0:00:31.429 **********
changed: [sut] => {
"changed": true,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target polkit.service system.slice dbus.service dbus.socket sysinit.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "network-pre.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target ipset.service ebtables.service nftables.service ip6tables.service iptables.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket sysinit.target system.slice",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Saturday 25 May 2024 08:57:25 +0000 (0:00:00.949) 0:00:32.378 **********
ok: [sut] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/libexec/platform-python",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Saturday 25 May 2024 08:57:25 +0000 (0:00:00.030) 0:00:32.409 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Saturday 25 May 2024 08:57:25 +0000 (0:00:00.019) 0:00:32.428 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Saturday 25 May 2024 08:57:25 +0000 (0:00:00.019) 0:00:32.447 **********
changed: [sut] => (item={'service': 'high-availability', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"service": "high-availability",
"state": "enabled"
}
}
changed: [sut] => (item={'port': '1229/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "1229/tcp",
"state": "enabled"
}
}
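The two changed items mirror the firewall variable that the ha_cluster role hands to fedora.linux_system_roles.firewall for the high-availability service and the fence-virt port; reconstructed from the item values above as a sketch, not copied from the role source:
firewall:
  - service: high-availability
    state: enabled
  - port: 1229/tcp
    state: enabled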
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Saturday 25 May 2024 08:57:27 +0000 (0:00:01.521) 0:00:33.968 **********
skipping: [sut] => (item={'service': 'high-availability', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"service": "high-availability",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [sut] => (item={'port': '1229/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "1229/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [sut] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.047) 0:00:34.016 **********
skipping: [sut] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.026) 0:00:34.042 **********
skipping: [sut] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.023) 0:00:34.066 **********
skipping: [sut] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.023) 0:00:34.090 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.018) 0:00:34.109 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.018) 0:00:34.127 **********
skipping: [sut] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure selinux] ****************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:38
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.020) 0:00:34.147 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Populate service facts] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3
Saturday 25 May 2024 08:57:27 +0000 (0:00:00.024) 0:00:34.172 **********
ok: [sut] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"arp-ethers.service": {
"name": "arp-ethers.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"auto-cpufreq.service": {
"name": "auto-cpufreq.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"corosync-notifyd.service": {
"name": "corosync-notifyd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync-qnetd.service": {
"name": "corosync-qnetd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync.service": {
"name": "corosync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crm_mon.service": {
"name": "crm_mon.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-activation.service": {
"name": "lvm2-activation.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-pvscan@.service": {
"name": "lvm2-pvscan@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"pacemaker.service": {
"name": "pacemaker.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"pcsd-ruby.service": {
"name": "pcsd-ruby.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"pcsd.service": {
"name": "pcsd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"power-profiles-daemon.service": {
"name": "power-profiles-daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"snapd.seeded.service": {
"name": "snapd.seeded.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tlp.service": {
"name": "tlp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"yppasswdd.service": {
"name": "yppasswdd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypserv.service": {
"name": "ypserv.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypxfrd.service": {
"name": "ypxfrd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Set the fence-virt/fence-agents port to _ha_cluster_selinux] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:12
Saturday 25 May 2024 08:57:29 +0000 (0:00:01.764) 0:00:35.936 **********
ok: [sut] => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"changed": false
}
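[Editor's note] The task result above shows _ha_cluster_selinux being seeded with the fence-virt/fence-agents port (1229/tcp, setype cluster_port_t). A minimal sketch of an equivalent set_fact task, reconstructed from the logged result and not taken from the role's source, would be:

- name: Seed _ha_cluster_selinux with the fence-virt/fence-agents port  # illustrative sketch
  ansible.builtin.set_fact:
    _ha_cluster_selinux:
      - ports: "1229"
        proto: tcp
        setype: cluster_port_t
        state: present
        local: true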
TASK [fedora.linux_system_roles.ha_cluster : Get associated selinux ports] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:20
Saturday 25 May 2024 08:57:29 +0000 (0:00:00.028) 0:00:35.965 **********
ok: [sut] => {
"changed": false,
"cmd": "set -euo pipefail\nfirewall-cmd --info-service=high-availability | egrep \" +ports: +\" | sed -e \"s/ *ports: //\"",
"delta": "0:00:00.308971",
"end": "2024-05-25 08:57:29.720971",
"rc": 0,
"start": "2024-05-25 08:57:29.412000"
}
STDOUT:
2224/tcp 3121/tcp 5403/tcp 5404/udp 5405-5412/udp 9929/tcp 9929/udp 21064/tcp
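[Editor's note] The shell pipeline above asks firewalld for the ports of its high-availability service and strips the "ports:" label, leaving only the whitespace-separated list printed in STDOUT; those entries (2224/tcp, 3121/tcp, ...) become the loop items of the next task. A sketch of how such a task could register the output for later use (the register name __ha_ports is an assumption for illustration):

- name: Get associated selinux ports  # same shell pipeline as logged above
  ansible.builtin.shell: |
    set -euo pipefail
    firewall-cmd --info-service=high-availability | egrep " +ports: +" | sed -e "s/ *ports: //"
  register: __ha_ports        # assumed variable name, not from the role
  changed_when: false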
TASK [fedora.linux_system_roles.ha_cluster : Add the high-availability service ports to _ha_cluster_selinux] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:28
Saturday 25 May 2024 08:57:29 +0000 (0:00:00.518) 0:00:36.484 **********
ok: [sut] => (item=2224/tcp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "2224/tcp"
}
ok: [sut] => (item=3121/tcp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "3121/tcp"
}
ok: [sut] => (item=5403/tcp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5403",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "5403/tcp"
}
ok: [sut] => (item=5404/udp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5403",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5404",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "5404/udp"
}
ok: [sut] => (item=5405-5412/udp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5403",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5404",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5405-5412",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "5405-5412/udp"
}
ok: [sut] => (item=9929/tcp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5403",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5404",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5405-5412",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "9929",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "9929/tcp"
}
ok: [sut] => (item=9929/udp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5403",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5404",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5405-5412",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "9929",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "9929",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "9929/udp"
}
ok: [sut] => (item=21064/tcp) => {
"ansible_facts": {
"_ha_cluster_selinux": [
{
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5403",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5404",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "5405-5412",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "9929",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "9929",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
{
"local": true,
"ports": "21064",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": "21064/tcp"
}
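[Editor's note] Each loop iteration above appends one entry to _ha_cluster_selinux, splitting the "port/proto" item into its ports and proto fields while keeping setype cluster_port_t. A minimal sketch of such an accumulating set_fact, assuming the __ha_ports register from the previous note (the exact Jinja expression is an illustration, not the role's actual code):

- name: Add the high-availability service ports to _ha_cluster_selinux
  ansible.builtin.set_fact:
    _ha_cluster_selinux: >-
      {{ _ha_cluster_selinux + [{
           'ports': item.split('/') | first,
           'proto': item.split('/') | last,
           'setype': 'cluster_port_t',
           'state': 'present',
           'local': true
         }] }}
  loop: "{{ __ha_ports.stdout.split() }}"  # e.g. ['2224/tcp', '3121/tcp', '5405-5412/udp', ...]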
TASK [Ensure the service and the ports status with the selinux role] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:35
Saturday 25 May 2024 08:57:29 +0000 (0:00:00.113) 0:00:36.598 **********
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean
TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2
Saturday 25 May 2024 08:57:29 +0000 (0:00:00.050) 0:00:36.649 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for sut
TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2
Saturday 25 May 2024 08:57:29 +0000 (0:00:00.036) 0:00:36.685 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] *************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7
Saturday 25 May 2024 08:57:29 +0000 (0:00:00.027) 0:00:36.713 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for sut
TASK [fedora.linux_system_roles.selinux : Check if system is ostree] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5
Saturday 25 May 2024 08:57:30 +0000 (0:00:00.035) 0:00:36.748 **********
ok: [sut] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10
Saturday 25 May 2024 08:57:30 +0000 (0:00:00.210) 0:00:36.958 **********
ok: [sut] => {
"ansible_facts": {
"__selinux_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:14
Saturday 25 May 2024 08:57:30 +0000 (0:00:00.030) 0:00:36.988 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ansible_python_version is version('3', '<')",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:23
Saturday 25 May 2024 08:57:30 +0000 (0:00:00.026) 0:00:37.015 **********
ok: [sut] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
lsrpackages: python3-libselinux python3-policycoreutils
TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:34
Saturday 25 May 2024 08:57:32 +0000 (0:00:02.472) 0:00:39.487 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ansible_os_family == \"Suse\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:45
Saturday 25 May 2024 08:57:32 +0000 (0:00:00.025) 0:00:39.513 **********
changed: [sut] => {
"changed": true,
"rc": 0,
"results": [
"Installed: policycoreutils-python-utils-2.9-26.el8.noarch"
]
}
lsrpackages: policycoreutils-python-utils
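[Editor's note] policycoreutils-python-utils provides the semanage tool used for SELinux port labeling in the following tasks. A package task along these lines would produce the logged install (a sketch; the selinux role's actual task may differ):

- name: Install SELinux tool semanage  # illustrative sketch
  ansible.builtin.package:
    name: policycoreutils-python-utils
    state: present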
TASK [fedora.linux_system_roles.selinux : Refresh facts] ***********************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:53
Saturday 25 May 2024 08:57:36 +0000 (0:00:03.281) 0:00:42.795 **********
ok: [sut]
TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5
Saturday 25 May 2024 08:57:36 +0000 (0:00:00.838) 0:00:43.633 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13
Saturday 25 May 2024 08:57:36 +0000 (0:00:00.024) 0:00:43.657 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ansible_selinux.status == \"disabled\" and selinux_state",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] *********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21
Saturday 25 May 2024 08:57:36 +0000 (0:00:00.023) 0:00:43.681 **********
ok: [sut] => {
"ansible_facts": {
"selinux_reboot_required": false
},
"changed": false
}
TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25
Saturday 25 May 2024 08:57:36 +0000 (0:00:00.029) 0:00:43.711 **********
skipping: [sut] => {
"changed": false,
"false_condition": "selinux_reboot_required",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30
Saturday 25 May 2024 08:57:36 +0000 (0:00:00.023) 0:00:43.734 **********
skipping: [sut] => {
"false_condition": "ansible_selinux.status == \"disabled\""
}
TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.023) 0:00:43.758 **********
skipping: [sut] => {
"changed": false,
"false_condition": "selinux_all_purge | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.024) 0:00:43.782 **********
skipping: [sut] => {
"changed": false,
"false_condition": "selinux_booleans_purge | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.023) 0:00:43.806 **********
skipping: [sut] => {
"changed": false,
"false_condition": "selinux_fcontexts_purge | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.024) 0:00:43.830 **********
skipping: [sut] => {
"changed": false,
"false_condition": "selinux_ports_purge | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.024) 0:00:43.854 **********
skipping: [sut] => {
"changed": false,
"false_condition": "selinux_logins_purge | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.023) 0:00:43.877 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.010) 0:00:43.888 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87
Saturday 25 May 2024 08:57:37 +0000 (0:00:00.012) 0:00:43.900 **********
changed: [sut] => (item={'ports': '1229', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "1229",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"1229"
],
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '2224', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "2224",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"2224"
],
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '3121', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "3121",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"3121"
],
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '5403', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "5403",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"5403"
],
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '5404', 'proto': 'udp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "5404",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"5404"
],
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '5405-5412', 'proto': 'udp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "5405-5412",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"5405-5412"
],
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '9929', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "9929",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"9929"
],
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '9929', 'proto': 'udp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "9929",
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"9929"
],
"proto": "udp",
"setype": "cluster_port_t",
"state": "present"
}
changed: [sut] => (item={'ports': '21064', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => {
"__selinux_item": {
"local": true,
"ports": "21064",
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
},
"ansible_loop_var": "__selinux_item",
"changed": true,
"ports": [
"21064"
],
"proto": "tcp",
"setype": "cluster_port_t",
"state": "present"
}
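[Editor's note] Each item above adds a local SELinux port mapping of type cluster_port_t. Outside Ansible, the same change for a single item could be made with semanage, e.g. "semanage port -a -t cluster_port_t -p tcp 2224"; inside a playbook, a rough equivalent is the community.general.seport module (an assumption about tooling; the selinux role itself may apply the change through a different mechanism):

- name: Label one of the HA ports (2224/tcp) with cluster_port_t  # one item as an example
  community.general.seport:
    ports: "2224"
    proto: tcp
    setype: cluster_port_t
    state: present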
TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99
Saturday 25 May 2024 08:57:59 +0000 (0:00:22.763) 0:01:06.663 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
Saturday 25 May 2024 08:57:59 +0000 (0:00:00.011) 0:01:06.675 **********
ok: [sut] => {
"ansible_facts": {
"selinux_checksums": true,
"selinux_installed_modules": {
"abrt": {
"100": {
"checksum": "sha256:13dad22da122be9f7d5df4dbedae6a515323542fdc1a7e466d7a1a3d36d29731",
"enabled": 1
}
},
"accountsd": {
"100": {
"checksum": "sha256:8bd25829d921be0b5adf92ddaca7ab94cedca1d57796749cfa63571b6550e3da",
"enabled": 1
}
},
"acct": {
"100": {
"checksum": "sha256:2699d826efd46176017695c768804c505a54b277b05f1feb9c43a613bab4e6aa",
"enabled": 1
}
},
"afs": {
"100": {
"checksum": "sha256:99920dd4e0855870f7e6f9666928d13fe18ddccca9d38b92ea70a6ce3c8c7539",
"enabled": 1
}
},
"aiccu": {
"100": {
"checksum": "sha256:a7aedc8354b4335412871adfd2ab5b0c6da1ea63c8dd797718e4214a5d511bb5",
"enabled": 1
}
},
"aide": {
"100": {
"checksum": "sha256:8adb5c3a5ed74695e975eecbf290640b179eb6345a7740745ecfe3164efe209f",
"enabled": 1
}
},
"ajaxterm": {
"100": {
"checksum": "sha256:d3a03c2837d5dde7145e27902ff8578e00734ab34e8ea1a45aee58b83e9ad6d1",
"enabled": 1
}
},
"alsa": {
"100": {
"checksum": "sha256:202f94345fba8f4bc942dc9b75bbb6eea3b4cb02411cf6ed79858d72aa883c89",
"enabled": 1
}
},
"amanda": {
"100": {
"checksum": "sha256:f9a99d97370017307349a154ce479969395bbbfe434e4829573269f770efdd0d",
"enabled": 1
}
},
"amtu": {
"100": {
"checksum": "sha256:bc9934a2ae61fa117614f201479966d788484f3a7382de4ebad99790a465e2b7",
"enabled": 1
}
},
"anaconda": {
"100": {
"checksum": "sha256:b8aabc624243533d483c3dd5574a490a43e7ec0f2f7940798c12b4089bbd0642",
"enabled": 1
}
},
"antivirus": {
"100": {
"checksum": "sha256:1de6460ccaea5a5749eba17489b9765035c8202eb9492485ff39157564001a2c",
"enabled": 1
}
},
"apache": {
"100": {
"checksum": "sha256:1a0c38364558bebdae3efaa1fcf8be232184dcddcaab345bba1c40bf239dd0ed",
"enabled": 1
}
},
"apcupsd": {
"100": {
"checksum": "sha256:175308edb201092c22791f419d32da3f661e7ccfb9c5d5855ad753405c10023b",
"enabled": 1
}
},
"apm": {
"100": {
"checksum": "sha256:a1410f65d6bf017caedaffaa59016877686099fb7df3c4d801136de79a61795e",
"enabled": 1
}
},
"application": {
"100": {
"checksum": "sha256:a8e9d90aa1188068ca66be55c4d8abf9982666171bbdd8d4da1f2a254c34a080",
"enabled": 1
}
},
"arpwatch": {
"100": {
"checksum": "sha256:2cb8afd237d6bc5693e5d54be1a455b6ed632fbbe76cea406163f9c48d00e79f",
"enabled": 1
}
},
"asterisk": {
"100": {
"checksum": "sha256:0b66b387174001e926cf1454c3516bb32d96610a0f598065fe6d7a917ca897fe",
"enabled": 1
}
},
"auditadm": {
"100": {
"checksum": "sha256:dcd9e7f5e71fb9f7aace30b5755efcbf85fe88f884d4253cc9abcad1c44e5f4d",
"enabled": 1
}
},
"authconfig": {
"100": {
"checksum": "sha256:bdb8072e463c84cb01e6933093428be2b6ee5299d82e26730b12dd2b66d89355",
"enabled": 1
}
},
"authlogin": {
"100": {
"checksum": "sha256:a89b04c7a40bb373de2bbb0a2210cca454e7d4a805321fbe65462ae5551db656",
"enabled": 1
}
},
"automount": {
"100": {
"checksum": "sha256:41ec4e0c5c46118cb4dfa8c8b1834f330dce4ffdea3d534a8d5007a63b3e5262",
"enabled": 1
}
},
"avahi": {
"100": {
"checksum": "sha256:7628cb8340258102798a6e36902d0210e2051ffb9fb4f7a1e4c62a612edfe6fa",
"enabled": 1
}
},
"awstats": {
"100": {
"checksum": "sha256:9b92e64a3331076ad443862aa2ba98a2c4d9b00638bf19bb9726f572dee5eff4",
"enabled": 1
}
},
"bacula": {
"100": {
"checksum": "sha256:32cedcc57f6a973ac5adc16d8df343fc1ca4b3716f7cdcdae0d2490a6e5765ac",
"enabled": 1
}
},
"base": {
"100": {
"checksum": "sha256:d99ed290beecf2b10a557a21b06b63cabc28dab4050f2e7197d2cb9e30519fd3",
"enabled": 1
}
},
"bcfg2": {
"100": {
"checksum": "sha256:ea510637d47b7fabc3f617f8a6f3ca3172bf9215c2d6b64ad19cd5d8819c8b6b",
"enabled": 1
}
},
"bind": {
"100": {
"checksum": "sha256:39520749f8aba46f975a87187975d8dcd014ad67d22515951f51fa3fd1b0478f",
"enabled": 1
}
},
"bitlbee": {
"100": {
"checksum": "sha256:bf04e481614825a35c26a547b19098ff1c8acd0d915c5b4f938b9fa595459d00",
"enabled": 1
}
},
"blkmapd": {
"100": {
"checksum": "sha256:ca870c95742bf987a2e739286cbcb998b58c091a422251fdd8de57228b28fd96",
"enabled": 1
}
},
"blueman": {
"100": {
"checksum": "sha256:7e4b2b3df3962273436b561c806c816fe4b1e5d6781efa33a7109b05f796edd7",
"enabled": 1
}
},
"bluetooth": {
"100": {
"checksum": "sha256:da457ef2ce595c3bf9f70697029ea90e96472ae562f685a7f919a7778a778d09",
"enabled": 1
}
},
"boinc": {
"100": {
"checksum": "sha256:d74bd3b6b3850c30b5bbf77822ab82b43f36600e4f76cd68674ef361328afb05",
"enabled": 1
}
},
"boltd": {
"100": {
"checksum": "sha256:4ccf41e247c5a7066042a0ebaae492805a1d640f777e8e771701f340a76bce30",
"enabled": 1
}
},
"bootloader": {
"100": {
"checksum": "sha256:46e55021d6c4cede091a992ab33521bb1aba4ca1d44879d778973b279204933c",
"enabled": 1
}
},
"brctl": {
"100": {
"checksum": "sha256:f9645adde2441e43369a255c6a194f01c6f5800347ad710ce3e147df884b98aa",
"enabled": 1
}
},
"brltty": {
"100": {
"checksum": "sha256:603734d4772f482f282eb217c03647f705d66de27fc927c64e02787369b0f78a",
"enabled": 1
}
},
"bugzilla": {
"100": {
"checksum": "sha256:326d2a188603c908cdae3c9dcdae6bda37b98ec4cc23f3b31878e2bbd0cd33b2",
"enabled": 1
}
},
"bumblebee": {
"100": {
"checksum": "sha256:e8ca8d5318a68243441fdb993fbab6d566f7462fd5557b55733f8ddbfcc4b276",
"enabled": 1
}
},
"cachefilesd": {
"100": {
"checksum": "sha256:86fe9c1aa8b2d7a6bdd9bd8d0c7a41a7ae0e4e14e32eaea6cb920367c2f495d7",
"enabled": 1
}
},
"calamaris": {
"100": {
"checksum": "sha256:1069377693a5d730d57e4ddd6f73ce20b67b595aae90a16459e852d238163b48",
"enabled": 1
}
},
"callweaver": {
"100": {
"checksum": "sha256:880b626c3d04c5669d64ee617ee36a18566e91adeaac67b9527b0a795543575e",
"enabled": 1
}
},
"canna": {
"100": {
"checksum": "sha256:b9256764ca5e34142e8cffea57fafc2fa66f78dc8c05761f97fa9becd1d77311",
"enabled": 1
}
},
"ccs": {
"100": {
"checksum": "sha256:ad293ee5e252966d14fa6bf09240f143460df4b928672a33a398a5793777c4e4",
"enabled": 1
}
},
"cdrecord": {
"100": {
"checksum": "sha256:dda8d62c3bf2503ff9762bd031c35a76cac8059d08592fe23e4d3fe11b0ac8cc",
"enabled": 1
}
},
"certmaster": {
"100": {
"checksum": "sha256:b431dd84f2c6b971bc573674fa6c4ee2fedf910b0123ba5d9acb5011c208fd72",
"enabled": 1
}
},
"certmonger": {
"100": {
"checksum": "sha256:965ec65dfc98cbabce2350bd52fa7ce92c2f4ab4704348f1555f2a3d9edfd1b8",
"enabled": 1
}
},
"certwatch": {
"100": {
"checksum": "sha256:77f0299f67e43927eacb553d1002beeebc3098b4bee64d8dc3dadb8fd23fbb5c",
"enabled": 1
}
},
"cfengine": {
"100": {
"checksum": "sha256:c78b908838f1d64ee9ebb0a51b7fa438527716936471a573e1b4b7c393bd6b8d",
"enabled": 1
}
},
"cgdcbxd": {
"100": {
"checksum": "sha256:5d3633e0b77db69721e4f64167d7e5f7779c3e5fa76e095d25f8467f2a0bdfec",
"enabled": 1
}
},
"cgroup": {
"100": {
"checksum": "sha256:9368c6c54bd5ec6f20e4c3b47c86e60af07346c4e86e525b6bd7288b54b7e774",
"enabled": 1
}
},
"chrome": {
"100": {
"checksum": "sha256:d31ce9d2fe78cafcd5e3c8decf22ae1e9ea6f74026ca65b6320afe9a33cd609a",
"enabled": 1
}
},
"chronyd": {
"100": {
"checksum": "sha256:7d9624729861397cf7720c2324c65489a3d30485e6a884ab1ff9a8ca22efa678",
"enabled": 1
}
},
"cinder": {
"100": {
"checksum": "sha256:fc169721c78f5b0857ed8312e59ba4c134b685c4c322dae242b92e815e35e6fb",
"enabled": 1
}
},
"cipe": {
"100": {
"checksum": "sha256:02c20398b9eff51ed431b7ad739a6015d2451b4bf6e3e5da380606d85a77852c",
"enabled": 1
}
},
"clock": {
"100": {
"checksum": "sha256:4f90655d2243cfc32ea7436a953cccb8a34af895f83361235a3a5cda40dbc75f",
"enabled": 1
}
},
"clogd": {
"100": {
"checksum": "sha256:ba78a422a10b65591c48cb038f8a55614944163f3140275852d293fb0c548bfa",
"enabled": 1
}
},
"cloudform": {
"100": {
"checksum": "sha256:481f5fbc7810a5a81851edbe5a6b124141257f5fbbb83d8830ae0a34924ed3d9",
"enabled": 1
}
},
"cmirrord": {
"100": {
"checksum": "sha256:8f8fb986f15b8b7c5c250d250fdbbb2f78874e13394105c9c486488a16e94c91",
"enabled": 1
}
},
"cobbler": {
"100": {
"checksum": "sha256:e0e264b9cc83962dbbb27c152a72f01c6a355467c4e845b52e65c8b88d8d75d6",
"enabled": 1
}
},
"cockpit": {
"100": {
"checksum": "sha256:cb7fccd94903a6e256a586d758085f6f59c0f8b1c5b4cb99536915526d2224ec",
"enabled": 1
}
},
"collectd": {
"100": {
"checksum": "sha256:7f08e2e248d33162dc9b237c37ed3a3dba0511bbcc71d87482e95093fb8c6456",
"enabled": 1
}
},
"colord": {
"100": {
"checksum": "sha256:86e58c9f12c519a2c3b090b64a276722374054ea900c775b2f8ab4ef2867dcf0",
"enabled": 1
}
},
"comsat": {
"100": {
"checksum": "sha256:1d57ffaad6b96e3ca8ac82c23b52d58d81e1f69f5d54a648a16da8ffa8070e53",
"enabled": 1
}
},
"condor": {
"100": {
"checksum": "sha256:dbc3f2f0c12f9aeed14056fd7e7c46a4ecab3569198f891643172cd032f3fc00",
"enabled": 1
}
},
"conman": {
"100": {
"checksum": "sha256:1270caf15af248a487cd5ce728daae2699ffd9139823c805ec49213ab1c835cb",
"enabled": 1
}
},
"conntrackd": {
"100": {
"checksum": "sha256:56fd7d7a550dbc4188b93afd0fde8c706623b3a5d26db265ee016967ba4ddfee",
"enabled": 1
}
},
"consolekit": {
"100": {
"checksum": "sha256:5bd7a7acc191766583d933b04321e64657138959bf40a4d2986b013b942c4ba8",
"enabled": 1
}
},
"couchdb": {
"100": {
"checksum": "sha256:12b2e3e7314bda4e76d3883901e6470927e85343f742fb44b174ce968f1ad8b5",
"enabled": 1
}
},
"courier": {
"100": {
"checksum": "sha256:40ae5f173004741838002644e5bff73cf16f2f3a1928c45fa17674f9a0df5148",
"enabled": 1
}
},
"cpucontrol": {
"100": {
"checksum": "sha256:1485a6d64d00619898d2789d27391f2a57a7fb1f0e8c73daf59baca8641564a3",
"enabled": 1
}
},
"cpufreqselector": {
"100": {
"checksum": "sha256:687564eb09acf3e7f1475fe2a133941c36999bd037aa8a794feea2d9f2c26385",
"enabled": 1
}
},
"cpuplug": {
"100": {
"checksum": "sha256:c16e376ff6c51da1911e68a8a7d42f5730eda45febfd0875c78cac4b9cf6e78c",
"enabled": 1
}
},
"cron": {
"100": {
"checksum": "sha256:6be0252b3c6bcbfb4c51dfd3ae1ae262f5de153234917ac4d342b18ae0292060",
"enabled": 1
}
},
"ctdb": {
"100": {
"checksum": "sha256:06dd65a4361bf8076c14b322dd30003295c0b9d75bf1ae610961b13a1f9431da",
"enabled": 1
}
},
"cups": {
"100": {
"checksum": "sha256:3d5e5bbf131d98d95f7f1431893eb137bd833dbfd8469f9c386d72bb4e8f9b9a",
"enabled": 1
}
},
"cvs": {
"100": {
"checksum": "sha256:bbc8d76cc8609849d5b078c5b2ac7364470a06d77d67b97d5f58429d7b679e33",
"enabled": 1
}
},
"cyphesis": {
"100": {
"checksum": "sha256:b1a41211ae3cf69b819df517eccd0fda2088c27685dad68de64531b9794ec518",
"enabled": 1
}
},
"cyrus": {
"100": {
"checksum": "sha256:60defb1f6feeb1d607734c4912e52e03bf5b0c27cb6f31a37fa7e05f3497b323",
"enabled": 1
}
},
"daemontools": {
"100": {
"checksum": "sha256:1034e2442c975dd2ccf84791b1a826d02032f13762d57c3485e51e2b9a7dc03f",
"enabled": 1
}
},
"dbadm": {
"100": {
"checksum": "sha256:40306590ef444152ae18b65040d85442c14853a9cc4c31b0224c4d19517d66ea",
"enabled": 1
}
},
"dbskk": {
"100": {
"checksum": "sha256:24559eff82b251f9814ae88c36a7cbacda1ed419a80145aef545306e88cb0da8",
"enabled": 1
}
},
"dbus": {
"100": {
"checksum": "sha256:50ea4eb05a06315449092c939e2307436ac6461e47ca69f0d42cc4e321e86280",
"enabled": 1
}
},
"dcc": {
"100": {
"checksum": "sha256:06e414b0a83b49968f62018cecde48dcfe68b2e9d699915367b3e04461188a0d",
"enabled": 1
}
},
"ddclient": {
"100": {
"checksum": "sha256:73ca2525a14e3161524f6e8fc0d016430a536002f1cb3833db1334670b458436",
"enabled": 1
}
},
"denyhosts": {
"100": {
"checksum": "sha256:1bd00b13b9bda18274a771d66f7cba8fe62e5e95ea8f51415da6b1fa1336df1b",
"enabled": 1
}
},
"devicekit": {
"100": {
"checksum": "sha256:03b01b781881cc60438bc357bd60596970b8ac019b415969bca8a08358fcbfd1",
"enabled": 1
}
},
"dhcp": {
"100": {
"checksum": "sha256:2ad95a78468f7f4ea9a8c044c79c0a4ca9924b41432390ea2863a85c806c9a00",
"enabled": 1
}
},
"dictd": {
"100": {
"checksum": "sha256:c30c819f142d3c719d0ec5741af5a65161770ff140097fe63f7559d55b897500",
"enabled": 1
}
},
"dirsrv": {
"100": {
"checksum": "sha256:50efdc68200d27ce1a5db99a780aa7b0e84988669961d436d348c7bb8310d181",
"enabled": 1
}
},
"dirsrv-admin": {
"100": {
"checksum": "sha256:8d9234157484f6ae8ba22039b44fa19f4de8137be9321e5da393d72d85d89487",
"enabled": 1
}
},
"dmesg": {
"100": {
"checksum": "sha256:8b834312a2cb99ab89862f839a1315e78794dd92758785f84c9559285dfbe679",
"enabled": 1
}
},
"dmidecode": {
"100": {
"checksum": "sha256:2c7fb8c6c52f385b819713f0444a96cfd4e65b7dcb3ca79b932cc12ad9ce903d",
"enabled": 1
}
},
"dnsmasq": {
"100": {
"checksum": "sha256:44f66c5d4f635600ee9d0ba3fdea3896218f1420b5ead89e0f22d71a447f9e97",
"enabled": 1
}
},
"dnssec": {
"100": {
"checksum": "sha256:49427a9e92b87db77706e2b81ece254c99d3cd6ba020211e2afae65fab7ad066",
"enabled": 1
}
},
"dovecot": {
"100": {
"checksum": "sha256:cc8c3a2ee0233a7f1fdf38837b72ce5fd15efef782a36ab4b9aa2ec339b46fa6",
"enabled": 1
}
},
"drbd": {
"100": {
"checksum": "sha256:b66be23c1ded4e548e5369b744c7c2a4dfd7065582517525221177ca67657525",
"enabled": 1
}
},
"dspam": {
"100": {
"checksum": "sha256:5dd7221ba40e9b912367289fed8ca116c14da4fb8bd7f28f421c4008855bb9fc",
"enabled": 1
}
},
"entropyd": {
"100": {
"checksum": "sha256:0f68aeeb1da72efb8c9b1bb7db0a4180b6938672b16f33d1abcd65f5481d85a9",
"enabled": 1
}
},
"exim": {
"100": {
"checksum": "sha256:f4c4473ee49394e0e4629023772464a046c476f92b4a727acdf9f6c92711b952",
"enabled": 1
}
},
"fail2ban": {
"100": {
"checksum": "sha256:2383cb88b81bc5d87be9f3201a42da526532c4ea8e6d3b3f5023005c0ddf6f17",
"enabled": 1
}
},
"fcoe": {
"100": {
"checksum": "sha256:913e66ac5f5ce364e5ea556acfbf77845c25a4beb5ee64599613aa00127c1492",
"enabled": 1
}
},
"fetchmail": {
"100": {
"checksum": "sha256:63f00993bae4285eff5e993d208ea786785c4331e6947b3a48a97d31145b2e98",
"enabled": 1
}
},
"finger": {
"100": {
"checksum": "sha256:16c506d472b007f7d36850810ca0fcfd9482d30ce9c0ba790174b78294fd1d74",
"enabled": 1
}
},
"firewalld": {
"100": {
"checksum": "sha256:bbf58446f30b93de19e5a19087ee012f8e347fef5e7e8012e64b31a0ec21ab09",
"enabled": 1
}
},
"firewallgui": {
"100": {
"checksum": "sha256:b61ff17eee03141c9c7bd79d63331ecea733cba4b5b43b87d5141a40cdccdd69",
"enabled": 1
}
},
"firstboot": {
"100": {
"checksum": "sha256:c5540b8385c84075dd657e390d77ae886aa9d74b65444b9aa1d858f375819a8c",
"enabled": 1
}
},
"fprintd": {
"100": {
"checksum": "sha256:c1ffb7734a0359a7390830d9c6477ab61c45fc026368bfd5e2246523a6439464",
"enabled": 1
}
},
"freeipmi": {
"100": {
"checksum": "sha256:9af2291d75a2d643f53ff7a98bcabf22effb617329178efea45372d714825de1",
"enabled": 1
}
},
"freqset": {
"100": {
"checksum": "sha256:28bf77389f3e41743b30727a891609172a891466e92c28a919f43e628cc23a4d",
"enabled": 1
}
},
"fstools": {
"100": {
"checksum": "sha256:140caf542903419ee2471fd99ab06aa45899c400402c2580b395b182f24bd225",
"enabled": 1
}
},
"ftp": {
"100": {
"checksum": "sha256:7e8456fdf7807b30e1c257e568ba10305696cf5abdebc70988c288079884d46b",
"enabled": 1
}
},
"fwupd": {
"100": {
"checksum": "sha256:1dd6a45b73c7ce77a87af1e87354ada5aa5b2841aaaa045a6b4ae3c4d09f0f8b",
"enabled": 1
}
},
"games": {
"100": {
"checksum": "sha256:950d8be99d5349a3d893ba601c518e6b2af0d56c5b55514a45dbd8a3c61c9ecc",
"enabled": 1
}
},
"gdomap": {
"100": {
"checksum": "sha256:5040cb99d007fe9368bd37a9a6bf82f891c220ef652443896a0f2f6ca6f818e1",
"enabled": 1
}
},
"geoclue": {
"100": {
"checksum": "sha256:f0155b43152b6b4b850d1c4fb7daf16fd77992313b8be314ddb4901314bf913d",
"enabled": 1
}
},
"getty": {
"100": {
"checksum": "sha256:a60d07665b0ebd25fd54a9d82dad5eb7acbc11a2842dba56d7b9524d26ce14ce",
"enabled": 1
}
},
"git": {
"100": {
"checksum": "sha256:5eaccf209092db49c9a48d84e1387c1de76cb153c774c0bd615c001afab28664",
"enabled": 1
}
},
"gitosis": {
"100": {
"checksum": "sha256:b522382b64f36cf387cd892b45e916c861bd0a09697bc983eb55b53b0efd3081",
"enabled": 1
}
},
"glance": {
"100": {
"checksum": "sha256:2c51d19fca6ee40e137245ecb425edc77666d75c42ba583bf74cf13f10ace055",
"enabled": 1
}
},
"gnome": {
"100": {
"checksum": "sha256:420b9cefa6bdb542f6da10de7b36704a91509cf64cd2497e5693a858cfca5e41",
"enabled": 1
}
},
"gpg": {
"100": {
"checksum": "sha256:f821aa6ca5837a2d2de8180e74c267f68da951960c989fb13ebde5833c93738e",
"enabled": 1
}
},
"gpm": {
"100": {
"checksum": "sha256:bf30c4945be0065672fb47f70ad251b1079ada339f61f2679293cb0226d0d57a",
"enabled": 1
}
},
"gpsd": {
"100": {
"checksum": "sha256:5373b2332959d6c41c32160018274ab61e3f1abd0f0a5cc2302c45b141a39a9b",
"enabled": 1
}
},
"gssproxy": {
"100": {
"checksum": "sha256:7528c47be91a81ac19f2f54458309baeb0a232d83a1ccb2bd89fbc8cefb1ddc8",
"enabled": 1
}
},
"guest": {
"100": {
"checksum": "sha256:91f43e4d5ae283f0aa13c49efea93293dbdecd2b2f4f75db89371eda65b7523e",
"enabled": 1
}
},
"hddtemp": {
"100": {
"checksum": "sha256:f170e1da6acae4fd7108d22c8cf262916e034f0d3edbdebf3265a922a5355373",
"enabled": 1
}
},
"hostapd": {
"100": {
"checksum": "sha256:8b15f72328885c08bfda38082a62feeaa2c6692223a4d2bd1a572820d454a742",
"enabled": 1
}
},
"hostname": {
"100": {
"checksum": "sha256:e9fc1c4032c0346f751e1ef8ad1b3fe3425401b70a6c4354d4485472288e0bc5",
"enabled": 1
}
},
"hsqldb": {
"100": {
"checksum": "sha256:f70b198e5a5157722b69dc89109c4074a475e1085356cc610cc9b700567c154d",
"enabled": 1
}
},
"hwloc": {
"100": {
"checksum": "sha256:370e9eea2b927a2715018b667e9a56ad09af301a90811cd9b041da79f5384b38",
"enabled": 1
}
},
"hypervkvp": {
"100": {
"checksum": "sha256:b54ce6f4960a02d35e19d60bf8a07f7866514893e3193a5f4822c8580a46caa4",
"enabled": 1
}
},
"ibacm": {
"100": {
"checksum": "sha256:663b35f3874583ae074924bc068a8dc4c7c144adb60007da6103d1e3505ee37a",
"enabled": 1
}
},
"icecast": {
"100": {
"checksum": "sha256:dedaddef1d7447d25a1e7ff01e60e4545606e556c6770bd3fa94d9331de7a5d7",
"enabled": 1
}
},
"inetd": {
"100": {
"checksum": "sha256:ae408578a7160f2feae10269365558c43d9570b392642a92cc20f8ad47c58cce",
"enabled": 1
}
},
"init": {
"100": {
"checksum": "sha256:7ff95566a4f2bdb8ca3ec67acdade39e35fdabc57c2f00b989bab3f699f997f8",
"enabled": 1
}
},
"inn": {
"100": {
"checksum": "sha256:9ad99284192a443aa582e73b46667388b7a219dafae8dfce71a58a82bbae2f6c",
"enabled": 1
}
},
"insights_client": {
"100": {
"checksum": "sha256:0e41289d8dce065dcd41fd6cc1e1282efd4a58e7f9e3a2f1abc32f520fbbcc1e",
"enabled": 1
}
},
"iodine": {
"100": {
"checksum": "sha256:32501ab66def044fbc340cb5c656d5743c738bbd6fca5626c36c687419cd8d32",
"enabled": 1
}
},
"iotop": {
"100": {
"checksum": "sha256:d15656cd91a4e4e178a13f7cf910cfc552cc30db881a11ec88833f947edb4561",
"enabled": 1
}
},
"ipmievd": {
"100": {
"checksum": "sha256:d34fe186922c0e5726ca361343ec3846833ec3e4ab9b019b3d7bac1337383a16",
"enabled": 1
}
},
"ipsec": {
"100": {
"checksum": "sha256:d36c66c2c79d338c61c90d4136433e1e3a73435e920eb36d70682dfd5e147e59",
"enabled": 1
}
},
"iptables": {
"100": {
"checksum": "sha256:5a674017cc648e3262757464e5413503154cc1f593da545ce2c4f946991012bc",
"enabled": 1
}
},
"irc": {
"100": {
"checksum": "sha256:d72428ccbff5521367e00699c142bba64b2bbd44fed35deb29f9530cc0448378",
"enabled": 1
}
},
"irqbalance": {
"100": {
"checksum": "sha256:15650b2f39ccdfbcb1e4e867a35fce3c2768097e611e0c8ad9cb79ae6c66dd58",
"enabled": 1
}
},
"iscsi": {
"100": {
"checksum": "sha256:ccb27142f793095c79f531aae924baaeee5914c84228a09c09b9eca839f3524e",
"enabled": 1
}
},
"isns": {
"100": {
"checksum": "sha256:90b42f610fa328cdfb98bd0450bd052566f203e51e4a913dd6faded6da7fbe2c",
"enabled": 1
}
},
"jabber": {
"100": {
"checksum": "sha256:5ad49d140265305dc72781a6826d1de4614a33f83bd512acdc2263038ad41206",
"enabled": 1
}
},
"jetty": {
"100": {
"checksum": "sha256:d910afd1bfe836543ded50974dc24ae7bd5fd2609d6a9b2403316dffcd39832d",
"enabled": 1
}
},
"jockey": {
"100": {
"checksum": "sha256:d9a67ce1976ed2e79826d25f33dcb0b0bbde6c090600b605bbaaae45856d12f6",
"enabled": 1
}
},
"journalctl": {
"100": {
"checksum": "sha256:9ddb71271d0dbe5cede6179c0ca263e297dc6b65197bde2f7b14ce71f8dde369",
"enabled": 1
}
},
"kdbus": {
"100": {
"checksum": "sha256:5969c78be4a03cc91e426bc19b13c5188b5bf8ac11f5e2c21c098c3d68a7e3e3",
"enabled": 1
}
},
"kdump": {
"100": {
"checksum": "sha256:fdde3852d1decda649133c6345680f9353b86a6da2a98a83a8be101c9c25f103",
"enabled": 1
}
},
"kdumpgui": {
"100": {
"checksum": "sha256:66c67280c70a9b897b0f952067438e0eee05f9f48913508b38d745ef88747f32",
"enabled": 1
}
},
"keepalived": {
"100": {
"checksum": "sha256:c1177567c7bf67bb2d0de17760cecf56e0bb34f50d6fe060dec64ae97a76ecdb",
"enabled": 1
}
},
"kerberos": {
"100": {
"checksum": "sha256:826fbe83705494e009b242b88857c425eacba49aadae506ffa2012c80e60f7ae",
"enabled": 1
}
},
"keyboardd": {
"100": {
"checksum": "sha256:f199811d9ddc8db83864a09c543567fcb2f117b3241967b092bff7c9fdbfbfb6",
"enabled": 1
}
},
"keystone": {
"100": {
"checksum": "sha256:b0a7227a870ea987035e0cd524ad956a68287d0a67dd7135de41c6d5977ff4c2",
"enabled": 1
}
},
"kismet": {
"100": {
"checksum": "sha256:488fb5fd17cf1f630f3e48a853da05f86c06fc58219dc2ae59251865734bf800",
"enabled": 1
}
},
"kmscon": {
"100": {
"checksum": "sha256:d64019b11b6a37f6cdc5579d56eb1e19b6a7231501e1cfe2a838d26a2eac6033",
"enabled": 1
}
},
"kpatch": {
"100": {
"checksum": "sha256:00070d71dfe2632491305387ffb264127dca4387425015e4cb013d6bce5f95c3",
"enabled": 1
}
},
"ksmtuned": {
"100": {
"checksum": "sha256:891f082452240ad2e726bad777ea787d0f0f8695cc2a75f7439a2badda030d24",
"enabled": 1
}
},
"ktalk": {
"100": {
"checksum": "sha256:2df6f3dbad4a513ee1c113e496e8d2f5a19f56015f4a21e7478f2f5b53f36359",
"enabled": 1
}
},
"l2tp": {
"100": {
"checksum": "sha256:8e4cb0b0e0d1293d669de0b0e50f68d6d6fbe8e8d830a236a1c0e676f2326fb2",
"enabled": 1
}
},
"ldap": {
"100": {
"checksum": "sha256:d0177bb5873d0e6f9595020a8f39ba06b19e4636ea610175a3afef4aec2719cb",
"enabled": 1
}
},
"libraries": {
"100": {
"checksum": "sha256:6d5f128f2d4fd9137a7c70d0d024703547796a71f70017b3550a31d3450e0435",
"enabled": 1
}
},
"likewise": {
"100": {
"checksum": "sha256:e7eebd050230b358b43435d37ce308c3ba15e2516f4045abf7d26f03ebfbc11c",
"enabled": 1
}
},
"linuxptp": {
"100": {
"checksum": "sha256:4132cd51913a3044e453ed0b972db2ef511fdc7b2a1b592d1070177651066ab9",
"enabled": 1
}
},
"lircd": {
"100": {
"checksum": "sha256:cc81b79d2834e58bef7928f525c1a1eee5547e81d195444b3bc2741e396ae46b",
"enabled": 1
}
},
"livecd": {
"100": {
"checksum": "sha256:805c7bc4ded621b44ecf333d558328e115bba652fcbc91f436cefc948497688e",
"enabled": 1
}
},
"lldpad": {
"100": {
"checksum": "sha256:358c4b262655cffbf20f7484aedb22f094509f44d52a1fa3efe3edeafd99317e",
"enabled": 1
}
},
"loadkeys": {
"100": {
"checksum": "sha256:26f9e78406ecdc968ed670b32db1d10805e66875631558f092f08a6e1f2170dc",
"enabled": 1
}
},
"locallogin": {
"100": {
"checksum": "sha256:e07d92775ed25e7a3627bf977452844c67acf473b33075475f433f8be76dd755",
"enabled": 1
}
},
"lockdev": {
"100": {
"checksum": "sha256:1f946da2054cc1693209749df12ff01ab6456247d6225733aebb3a7d70a46e20",
"enabled": 1
}
},
"logadm": {
"100": {
"checksum": "sha256:70546c4b3d01f15bc7a69747dbb12fc6bcef5d899f6301f62c0c612c7069082a",
"enabled": 1
}
},
"logging": {
"100": {
"checksum": "sha256:656067c78ff1246a1a758a213d44307f91cb79336fe74a47015af425e58266fc",
"enabled": 1
}
},
"logrotate": {
"100": {
"checksum": "sha256:76cc40f1943fe21959793499bffaf35d0fe53ffc3f6c5a8b31eb96e738a286c2",
"enabled": 1
}
},
"logwatch": {
"100": {
"checksum": "sha256:cf4450b03e28762040c29f2a28af238cd4905d1c6bd4c73d656b266c7b9a8a6c",
"enabled": 1
}
},
"lpd": {
"100": {
"checksum": "sha256:9358dc35659b9570d3e8119a088b2693d7de505ea25996dc139517a857888857",
"enabled": 1
}
},
"lsm": {
"100": {
"checksum": "sha256:1247dc4bccfbc9ee42292db4415b21ae00bdef3dc2faeb267f045413da4a1b1b",
"enabled": 1
}
},
"lttng-tools": {
"100": {
"checksum": "sha256:79e4a2224ede13cd5f2c0e6e7c61e83efabaf1d05b86f6f7a710599bfc48edaf",
"enabled": 1
}
},
"lvm": {
"100": {
"checksum": "sha256:f56137657dd61a1a8a8844d5d1db01fc03330d17e05457d03f64756b344c32ef",
"enabled": 1
}
},
"mailman": {
"100": {
"checksum": "sha256:e47811cf3bd8204eaa02c4aab92f3d426f0a3ef97161e1579845d1e03df1fc1d",
"enabled": 1
}
},
"mailscanner": {
"100": {
"checksum": "sha256:8d447072ab5005ead27f1cb4d96dcbedf09a11182f660c6f59c6d56fd81235d8",
"enabled": 1
}
},
"man2html": {
"100": {
"checksum": "sha256:224584babd9e83c242d54fd8c5cd03379b0556005268aac22b15734b913f12e6",
"enabled": 1
}
},
"mandb": {
"100": {
"checksum": "sha256:ae44b8ec7a90ebbc45fdafe89663197b36e47120ad90eb22b475939055ea6924",
"enabled": 1
}
},
"mcelog": {
"100": {
"checksum": "sha256:c5d98ec368b145c74b4bf0ea8da3980b17af0c2d00654c5a6973241625f97b12",
"enabled": 1
}
},
"mediawiki": {
"100": {
"checksum": "sha256:43f1c6f7cfdeaa26891824167cf637a8670785c2674b45d85ce4a7ac77190a36",
"enabled": 1
}
},
"memcached": {
"100": {
"checksum": "sha256:f0f9c7367e9bd196aa463916bd5aab02f6966dad9564a0f2fd070bb2e8410aeb",
"enabled": 1
}
},
"milter": {
"100": {
"checksum": "sha256:db190bacd2b84a29971cd1940cd15d606abbfded5c9b956894717afd91fc7a0d",
"enabled": 1
}
},
"minidlna": {
"100": {
"checksum": "sha256:0d6ac660d641c1cf707a814ed08e19b9e21547a3eaa7134cab84dbc5fee6b5b2",
"enabled": 1
}
},
"minissdpd": {
"100": {
"checksum": "sha256:dd2ab85bcba6d204f9dbc7304e8a4940e5d1733d4b9cf4fcb0f4072982c585c3",
"enabled": 1
}
},
"mip6d": {
"100": {
"checksum": "sha256:406edf2c78ba0e692d5a78f3c5ca8d641d00131b143332adeaad9f325959683a",
"enabled": 1
}
},
"mirrormanager": {
"100": {
"checksum": "sha256:7084de59beaaaf4f630357ec53beff8d0a0ee532ac180fe58e23bfe98f1fdaee",
"enabled": 1
}
},
"miscfiles": {
"100": {
"checksum": "sha256:7e7e87e302bf847a4c59d69e5af60729e61bada0cc5d6ec17a25a6514476cb48",
"enabled": 1
}
},
"mock": {
"100": {
"checksum": "sha256:ae352eccf2f2c9ee8f0d9635517d9ae3c9bba83c617deca8f989e2aae8dd35fa",
"enabled": 1
}
},
"modemmanager": {
"100": {
"checksum": "sha256:84a60147d2b0121ff6ede6199583cdb5619480d015b2a675c6a0569f91c12d66",
"enabled": 1
}
},
"modutils": {
"100": {
"checksum": "sha256:67c3914aeb25e38fc6bd0793fddc41122dba1547d54e91a78065545fea3b9c87",
"enabled": 1
}
},
"mojomojo": {
"100": {
"checksum": "sha256:6030afcea9f8d46f25dd7785737edd25eb0f1e50b76eafe4d9103196b722d47e",
"enabled": 1
}
},
"mon_statd": {
"100": {
"checksum": "sha256:6ba3a594d01a11bc32e7cb554f7386314b5089eb4416fb776edb552a7d53c41d",
"enabled": 1
}
},
"mongodb": {
"100": {
"checksum": "sha256:1b2d30558bec7fc08d1d388ae2bb0becd2233c99c9fb173fd00809786ce5eed9",
"enabled": 1
}
},
"motion": {
"100": {
"checksum": "sha256:346e172be35df168eb0e4fbc8e176b0fda87de9bc5787f7a5ab7667cfe1e3c3b",
"enabled": 1
}
},
"mount": {
"100": {
"checksum": "sha256:f66c53d993dcd47ea1ff3d797f8fd69fb8161a4ff8a59f54f66a2de9462a55a7",
"enabled": 1
}
},
"mozilla": {
"100": {
"checksum": "sha256:7696dbb77c54531cf2574c7ede9f085cf64611dcf7a612530dce2de19f7a8b9f",
"enabled": 1
}
},
"mpd": {
"100": {
"checksum": "sha256:0f67c18c9101b53f57ef857a74d6044701e1d2c347f829a03c0579c545fdbef3",
"enabled": 1
}
},
"mplayer": {
"100": {
"checksum": "sha256:f82c0a72506f1011e47ba98e51d5edf906f58fc190d797f5d1a0b8e5cc7d0762",
"enabled": 1
}
},
"mrtg": {
"100": {
"checksum": "sha256:afcd9267261b334900420461279b8555fdb4bd783af880fa4606d8afc65e0712",
"enabled": 1
}
},
"mta": {
"100": {
"checksum": "sha256:b0f9753424c504a288f55d495105f6d475d69287b718190ae5192cf7d6ddfde6",
"enabled": 1
}
},
"munin": {
"100": {
"checksum": "sha256:29f87ec15fa19e975c83288d55e56bab64855a24c4d8826fe4138eda9a46cc97",
"enabled": 1
}
},
"mysql": {
"100": {
"checksum": "sha256:b028af8f4e726feb8c26037f7c6d6f97383977bd5ee6141ab4e8e1d096d6481f",
"enabled": 1
}
},
"mythtv": {
"100": {
"checksum": "sha256:e025b2dbf50901632da0ee2aa658105a322275eb120d782cbbf25f2895231154",
"enabled": 1
}
},
"naemon": {
"100": {
"checksum": "sha256:a19b3b0540dc52d9506ca7e5d804c2fe9115b3ea28bfd9273030e841e12eb277",
"enabled": 1
}
},
"nagios": {
"100": {
"checksum": "sha256:39ca80027ac8585f368bcd57f555ba87bf409f7b7d6c4292c09fd06cc1691c80",
"enabled": 1
}
},
"namespace": {
"100": {
"checksum": "sha256:ef73850f29b4ff4ff904d506d545bf366fd1e7c2ba82a7a7c9a4513e3eee45d9",
"enabled": 1
}
},
"ncftool": {
"100": {
"checksum": "sha256:2c9356101a9ddbec94afdd12ca669ba93a1d422c302f9e17b78b18670617d2a1",
"enabled": 1
}
},
"netlabel": {
"100": {
"checksum": "sha256:9a32ce04c1dd8e120588c15b3057f838bedce8f14c91576b667295d47800e0ad",
"enabled": 1
}
},
"netutils": {
"100": {
"checksum": "sha256:5e0a20ae09b00fac69ee30a0d55ff73fa692d8350c9c0b0343af61e4f0dd654f",
"enabled": 1
}
},
"networkmanager": {
"100": {
"checksum": "sha256:9c67b21155929e43e4efd3fc81a85fddc9f1030b47ee4a275789014c1311b972",
"enabled": 1
}
},
"ninfod": {
"100": {
"checksum": "sha256:85cac2885d75522eb07189efcc3feeb7775fc6daf5cf3f1a28a1fd2109fe148c",
"enabled": 1
}
},
"nis": {
"100": {
"checksum": "sha256:b5b133d60b98068eb9480c54285050ae9b49d2fb309eac8994cc91c865ee02d4",
"enabled": 1
}
},
"nova": {
"100": {
"checksum": "sha256:59919a89d30a5d4b60d6971fa636fb62605d59d214ec614adc279f6cbe2c2b27",
"enabled": 1
}
},
"nscd": {
"100": {
"checksum": "sha256:578bc975477539c659f3608b1445a0c7a9bc7c3f2dcf65b3e55f3a3af89ea564",
"enabled": 1
}
},
"nsd": {
"100": {
"checksum": "sha256:d5b03cdc6c8bbc222b8e3d30680b1a7d2d1a49837e7d509aafcf6b2a3a32195b",
"enabled": 1
}
},
"nslcd": {
"100": {
"checksum": "sha256:18b003071f4c36307616f7d5de8cff6d4e376af31cb96ce1a5ad6ae3011dfd09",
"enabled": 1
}
},
"ntop": {
"100": {
"checksum": "sha256:f942c7fbe636b9d60327ef9dade1120340c16a2992a6b50db5fbaecd44ffd63d",
"enabled": 1
}
},
"ntp": {
"100": {
"checksum": "sha256:686664a71e74b0edd643ab9d556b1aab092fa707935da5ea928a66f54a3c84e0",
"enabled": 1
}
},
"numad": {
"100": {
"checksum": "sha256:dabc5ce6244d0b0939e9a07bd6bc232e8b666529a0b7b29527e586db8224862c",
"enabled": 1
}
},
"nut": {
"100": {
"checksum": "sha256:653e708dec531e483992b25944a689ec9369478d039a5ec62c98294ab73ce8c4",
"enabled": 1
}
},
"nx": {
"100": {
"checksum": "sha256:4ae55fe839abaaf0ea52b79a5c8f6a906575b83cca29532c2dd52337fb3d5790",
"enabled": 1
}
},
"obex": {
"100": {
"checksum": "sha256:7b2c87e864b6008f734e1effa48cee1399f41843b9d80d3fd95fbd19e058598f",
"enabled": 1
}
},
"oddjob": {
"100": {
"checksum": "sha256:9de0b544b2373ea0f1b7217f9179898479dbff0da36ea9857783de57d06585cf",
"enabled": 1
}
},
"opafm": {
"100": {
"checksum": "sha256:761bf911674d23053eceabbbda8da16c73af5f300929a33a64513dc6e3b2d0af",
"enabled": 1
}
},
"openct": {
"100": {
"checksum": "sha256:5674f8e8c975570649e3065460786cb4521a86370bffef5a9de18c69813fe68e",
"enabled": 1
}
},
"opendnssec": {
"100": {
"checksum": "sha256:bdef6dbb24ae22548634759ac823a8c3e21fde6368cfdfd742480f7027e63ddd",
"enabled": 1
}
},
"openfortivpn": {
"100": {
"checksum": "sha256:1a1bff55993510cb6481383b299e1f1a6349ec76e4947bfc8c5b1347e4d30bf4",
"enabled": 1
}
},
"openhpid": {
"100": {
"checksum": "sha256:ad3f3f3ba4442930560b291c022e674e6a50e4a37fe027926299b2f6cdec14bd",
"enabled": 1
}
},
"openshift": {
"100": {
"checksum": "sha256:329e4b9d1df5012ace94cbe9cba7dfa7ee7d9f242090072c71aaacbeea78986a",
"enabled": 1
}
},
"openshift-origin": {
"100": {
"checksum": "sha256:31cbbb069354f984e4af75b387778fae1ff4dc6c3e60533357d005ffa960b51c",
"enabled": 1
}
},
"opensm": {
"100": {
"checksum": "sha256:c0e1bf0a8eb50e0b41fa69bf5b65e2a7c324e4bc7255933a5d2bac3b9ae6f4de",
"enabled": 1
}
},
"openvpn": {
"100": {
"checksum": "sha256:a4d12ae8ad77d65d0fcabb20aa4a83886e782d732123f686f88a7d7472384104",
"enabled": 1
}
},
"openvswitch": {
"100": {
"checksum": "sha256:a54f8a8ea5abb8a33734ecef9d9ad1c0dd090a6e0c5187e80de52f522d2d5e39",
"enabled": 1
}
},
"openwsman": {
"100": {
"checksum": "sha256:d6b7bb8f7749265bdaf938abecb2f8f78c6e9e8dc06c1c26b48da227af5a8654",
"enabled": 1
}
},
"oracleasm": {
"100": {
"checksum": "sha256:67e31eec391bac337ebacb78c096589af4b7e8be6aa05c34cf187ba922a2abde",
"enabled": 1
}
},
"osad": {
"100": {
"checksum": "sha256:6635ff0231bfc3d88c771553d495941ee0f98871edfe6c86205b087186b3a72f",
"enabled": 1
}
},
"pads": {
"100": {
"checksum": "sha256:5b4531e9231d399ebec8e6b6870a812c6a64b2daffde35fa57a009b24a01809f",
"enabled": 1
}
},
"passenger": {
"100": {
"checksum": "sha256:912a1c442559d6ab48453d87e2b997bdee3017a54a0b60aeaf7d4603fde0f34b",
"enabled": 1
}
},
"pcmcia": {
"100": {
"checksum": "sha256:456b3520c26e5f2a913437318715712ae00f64932a27ab1bb8b8b42e0524fa05",
"enabled": 1
}
},
"pcp": {
"100": {
"checksum": "sha256:5302332fba7e6724ab7a3c32bd523b10322c20011c6e42ae4e769a49f3efabdd",
"enabled": 1
}
},
"pcscd": {
"100": {
"checksum": "sha256:2ee37df066a9ff80439b08c092809f3661e2f9a8ad02134e839627fd23a20c1f",
"enabled": 1
}
},
"pdns": {
"100": {
"checksum": "sha256:a1a10cd52eb9dd15bc1ccfed440f6b3d235edc7405a3932f81805d8d94000245",
"enabled": 1
}
},
"pegasus": {
"100": {
"checksum": "sha256:4280c40629dd111fd1c89ff867ac72d1e7ddde49dc3d286637e6a86b868e2303",
"enabled": 1
}
},
"permissivedomains": {
"100": {
"checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2",
"enabled": 1
}
},
"pesign": {
"100": {
"checksum": "sha256:6461acd0385c0b1a32bf646fc9e09da0c7ca513954ed8fe2a03f4ee7f6a64fcf",
"enabled": 1
}
},
"pingd": {
"100": {
"checksum": "sha256:f7536a518a046b793ea3f74a67d677b878baac44b28268c5ccecbf10715d89ab",
"enabled": 1
}
},
"piranha": {
"100": {
"checksum": "sha256:11436fb7942d28e3eca22bc078ee5475f632d8447008a6414f337d4bbc3515dc",
"enabled": 1
}
},
"pkcs": {
"100": {
"checksum": "sha256:c70e17d1a4d347b38fdfbb2a5dab292e3e0c538ea52fb6cfdef2714e130da0b1",
"enabled": 1
}
},
"pkcs11proxyd": {
"100": {
"checksum": "sha256:c9582c89cac1546fa1e5bf9802c5a322e52e2529256f9e5922d5813e40be3646",
"enabled": 1
}
},
"pki": {
"100": {
"checksum": "sha256:ec40fbe6355370fe69a8ff343744654b06d4134c1518c64269be1f3a49083968",
"enabled": 1
}
},
"plymouthd": {
"100": {
"checksum": "sha256:7aa52d533e28a3ebf76d879c24bb4e0a58574033d5af6d4d22b716d1156c3f90",
"enabled": 1
}
},
"podsleuth": {
"100": {
"checksum": "sha256:b32a5cc38b8edcc76b94862cee0c822a5b4d095329f53ab6f7cb014c76346e8c",
"enabled": 1
}
},
"policykit": {
"100": {
"checksum": "sha256:686d9f7652cb2b3d7ce6af2aa620c14a6cbbbdb8d26b3630cfbf6bc34d9e3e6c",
"enabled": 1
}
},
"polipo": {
"100": {
"checksum": "sha256:6098bd8a4f449c01dc7e0f4509663994259fe8848f2f21d1319bf7105bbacc4e",
"enabled": 1
}
},
"portmap": {
"100": {
"checksum": "sha256:f561aef22cda98a94a74bedda09645e50066a77a23d3bdcbb1143b0c62ffe7b2",
"enabled": 1
}
},
"portreserve": {
"100": {
"checksum": "sha256:9de99e881e9e2e7e0b78629eec721840da4aa18f78ff5a06e46b7a596c28a09a",
"enabled": 1
}
},
"postfix": {
"100": {
"checksum": "sha256:3101c4c1d54f3e175dc3fcff001c6937a9ffec7781f4095ea38fea88df7e8067",
"enabled": 1
}
},
"postgresql": {
"100": {
"checksum": "sha256:a734cc086d7d73ef2ffe7543f82dc50b57619e78e60664cb67a9513790f3335a",
"enabled": 1
}
},
"postgrey": {
"100": {
"checksum": "sha256:ef4d03336b66c1184f352f9b3fe8004d870bbf003673d4393bde24ea14b056b8",
"enabled": 1
}
},
"ppp": {
"100": {
"checksum": "sha256:83e6712ba7343dc1346e94c51b75b05839f78bd24f9324d984b7aa9631bd0377",
"enabled": 1
}
},
"prelink": {
"100": {
"checksum": "sha256:df050b0d180947788ab45862c4627ae640c92cf0f6a994a685e4cb5fe46bef76",
"enabled": 1
}
},
"prelude": {
"100": {
"checksum": "sha256:88c5fa3da64c127ed6e688f9eba5e50a8f6f98ea3243d29b8b0bc0375ef95420",
"enabled": 1
}
},
"privoxy": {
"100": {
"checksum": "sha256:e4a84567c63c892d4cdda3a9a4b15ad5188c093da679a354f00c43b6376a844d",
"enabled": 1
}
},
"procmail": {
"100": {
"checksum": "sha256:98170eed35b67b9097514bcb044a18cc3f757af5f91b5d870ea707d6048cde75",
"enabled": 1
}
},
"prosody": {
"100": {
"checksum": "sha256:07e999e033252b28ae41697ddc23b42dbcf4bdc143c9eb1c55475aabc9fc9caf",
"enabled": 1
}
},
"psad": {
"100": {
"checksum": "sha256:7fc3410de486bf89c4d35989937f424b435c9c4f5398f47f9c840b146197c6ac",
"enabled": 1
}
},
"ptchown": {
"100": {
"checksum": "sha256:129978bcb62fdcaed728fb288b321c204575246eb535354e02bfd83089cb0ded",
"enabled": 1
}
},
"publicfile": {
"100": {
"checksum": "sha256:9cc75080e25fb5602ab266f1c0d0f16843bdfc561e7af6dec32d669e31bebe98",
"enabled": 1
}
},
"pulseaudio": {
"100": {
"checksum": "sha256:a41fc5d1275d548510a2be0180741f952f0f696f443eaabf03c1abf3f80f499e",
"enabled": 1
}
},
"puppet": {
"100": {
"checksum": "sha256:81559a7d5e16e228382840986ae0e414d4a78163a9b51b5d9c05a58e07574e8d",
"enabled": 1
}
},
"pwauth": {
"100": {
"checksum": "sha256:8590f80ce91ddd4862ce2beab9ec64deb66d99c5583ff5ee3cbff2e503caaa37",
"enabled": 1
}
},
"qmail": {
"100": {
"checksum": "sha256:917a35c0ec48acfb5166c937e97269acac39541acebad9c1c410bfdbcb483da1",
"enabled": 1
}
},
"qpid": {
"100": {
"checksum": "sha256:cfdb156d23ae6c99b3dbac171ab1626202bf1ae7671fae9f6d6f7241116638dd",
"enabled": 1
}
},
"quantum": {
"100": {
"checksum": "sha256:eb4881c554de7882b4e5590a8efb35a758fc1b3d61bc1502632d6f4e571cb331",
"enabled": 1
}
},
"quota": {
"100": {
"checksum": "sha256:27d1fb8e99c6d1c75fc8efa8aeaf4303d0dcd8d03cb2992d968a3186d648f4b9",
"enabled": 1
}
},
"rabbitmq": {
"100": {
"checksum": "sha256:f0b2b81a6670b7640d49d49c364635f39272330f08bcdaa23c681bf2ac64e10f",
"enabled": 1
}
},
"radius": {
"100": {
"checksum": "sha256:791a60cff31fca43e01aa4bfe3a57c5938015db44fd1f64064778dbbcdb6e2e2",
"enabled": 1
}
},
"radvd": {
"100": {
"checksum": "sha256:1cea7f5b37f7a0e722ecbccaa09d95db2b175ec125d62e3898a99081c51c6f96",
"enabled": 1
}
},
"raid": {
"100": {
"checksum": "sha256:a94b0b917312a73eda50ea641dee49eb00f49df286133fcdb13267fd49ce5d1f",
"enabled": 1
}
},
"rasdaemon": {
"100": {
"checksum": "sha256:159d40315f3f5086a31e6f0a6a90d342783d6f0c97c5feeb9c92808c7345adcf",
"enabled": 1
}
},
"rdisc": {
"100": {
"checksum": "sha256:a61f7efd50387ebfd35b675b22a8cba86c6216c0bbd901aab5e8674b5c442777",
"enabled": 1
}
},
"readahead": {
"100": {
"checksum": "sha256:276a24e14ef12f5fadaeab2883d501cb096e01a9ce1be2178a5c50ebfa6b3fcb",
"enabled": 1
}
},
"realmd": {
"100": {
"checksum": "sha256:61561d5f14d9a6597d6e312f5429947baab045d01a729f7cc34406e859fa0015",
"enabled": 1
}
},
"redis": {
"100": {
"checksum": "sha256:f40066828d25674c525148f890d9cc84ddbb203f5a4aaad616ef2cd3a497fdc3",
"enabled": 1
}
},
"remotelogin": {
"100": {
"checksum": "sha256:742f881c1a4838ecfc1a55a7f3b78a72267644e3a64e3ec45a191599b5bd8532",
"enabled": 1
}
},
"restraint": {
"400": {
"checksum": "sha256:5dd2b902123ef00065db6ec8d173f37baa26dbe43566bd5f06594ef1243fd5fd",
"enabled": 1
}
},
"rhcs": {
"100": {
"checksum": "sha256:67f232676ac23535867e2494f04989dbd6b9b6d4bbc67df67dc2edb4d31a8be8",
"enabled": 1
}
},
"rhev": {
"100": {
"checksum": "sha256:ee2f26beaa5c6a5d25e03ef9ab30302d6b29b283283683421fab52e29e47fe3d",
"enabled": 1
}
},
"rhgb": {
"100": {
"checksum": "sha256:39c550e1c8b149dc6f308b0f9ef238315208453ee064bb1558eff9137531840f",
"enabled": 1
}
},
"rhnsd": {
"100": {
"checksum": "sha256:16bff56244925c7696fa2da5a4c986132488c352149cc88181bf6b4143fc80ba",
"enabled": 1
}
},
"rhsmcertd": {
"100": {
"checksum": "sha256:e999510837aabb3ce118ad61225a846f687588e9a321ffe675b56511191bc323",
"enabled": 1
}
},
"rhts": {
"400": {
"checksum": "sha256:9000bd99784bc22ffda4493b4985e8c5a2e65e87aeaa1cb96ba82d367a27a8be",
"enabled": 1
}
},
"ricci": {
"100": {
"checksum": "sha256:c72c61297cf864a1abda8226de08039c8ae0212808d3f7fd8725b53b955d59f6",
"enabled": 1
}
},
"rkhunter": {
"100": {
"checksum": "sha256:d48bd9c5789f4adc396773664402ddeab432caa99597267ccdf24220948e5b3c",
"enabled": 1
}
},
"rkt": {
"100": {
"checksum": "sha256:a9414e82cadd2876471465737bd8322eb833e296869ebcefcd9e722ff717d350",
"enabled": 1
}
},
"rlogin": {
"100": {
"checksum": "sha256:a4b2e25abc4099a0a54821518b7c824a2ddb7544fb0b5ddde9a0a9be159ac1b2",
"enabled": 1
}
},
"rngd": {
"100": {
"checksum": "sha256:5c867af2674586cc1c41aa3203e3704a0d1400d344a8e257bc61e9eebb86ad03",
"enabled": 1
}
},
"rolekit": {
"100": {
"checksum": "sha256:73382d4b8a12fa161dbb5ba36c94e7f0b1f82b1abdf0a4f07ca6c981e08f271b",
"enabled": 1
}
},
"roundup": {
"100": {
"checksum": "sha256:1a2503ebaa997c6b6efd5d2343ea731f73b2f0312f2e8d5578dad2c8a84a94fa",
"enabled": 1
}
},
"rpc": {
"100": {
"checksum": "sha256:e423284f5ed36e7b6c52f581b444a981d5d1c8af6c8dabe8c6cb6c71d3f49fb2",
"enabled": 1
}
},
"rpcbind": {
"100": {
"checksum": "sha256:53831134210db04fe6e6b0f05e20b8b7307ae8c11e774faec9e1b3aa2b02b5dc",
"enabled": 1
}
},
"rpm": {
"100": {
"checksum": "sha256:acbd671bd661f9f2f25d4798f1646a51075f297c8b086ea9bd3133a00e356432",
"enabled": 1
}
},
"rrdcached": {
"100": {
"checksum": "sha256:c6110313310591ee2a08b504b04ebd1b98f370b6633172f06ee7c0c7db0a963d",
"enabled": 1
}
},
"rshd": {
"100": {
"checksum": "sha256:1340ab5daac926cc1354452869ab5aa78d27ceb110543624d2ffaf93773c394b",
"enabled": 1
}
},
"rssh": {
"100": {
"checksum": "sha256:9dabc52612d567e728786c007f5017c7032c02be3a9201521a530fc91ca789f8",
"enabled": 1
}
},
"rsync": {
"100": {
"checksum": "sha256:33dffe2764dc45bbc59b406a67187c39864412bac07ee089bda30ef09cb70faa",
"enabled": 1
}
},
"rtas": {
"100": {
"checksum": "sha256:9d55dfe843e44e8a93c02ea28b14856edfdb1f820bb647992daa6af11e2dbd37",
"enabled": 1
}
},
"rtkit": {
"100": {
"checksum": "sha256:ea77b9f26c8fc61b7fc281099b2f16e75c5b196660fff55a95f96e97935a7a1b",
"enabled": 1
}
},
"rwho": {
"100": {
"checksum": "sha256:4468bfdd23924a96b4cf8c6fa1a3fa606fdd8ac69b7cb17c16a6e39a95908921",
"enabled": 1
}
},
"samba": {
"100": {
"checksum": "sha256:c97b92abaf053976c89a670d82bf06bc5c7d561ccf03e3ff1ac84be6e01cfc5c",
"enabled": 1
}
},
"sambagui": {
"100": {
"checksum": "sha256:18d1a69de368fa621e8ef3234b8ddb40261ced880bb732328a310db5a62a7a0a",
"enabled": 1
}
},
"sandboxX": {
"100": {
"checksum": "sha256:711df017c1f168e33245144d67289225439bbed701fb1146cb83e9cd63ce1f7a",
"enabled": 1
}
},
"sanlock": {
"100": {
"checksum": "sha256:093d9d9793142bb9a8c4375f5f368ca1a4d9beb0cd05329518f91bb9ea51bd06",
"enabled": 1
}
},
"sasl": {
"100": {
"checksum": "sha256:536ce94509d38b40200debf17fbddc16ec9004463fdb3fc42890dde9b3eb56f1",
"enabled": 1
}
},
"sbd": {
"100": {
"checksum": "sha256:57ecac942ea46af55728362527d70a3e135c3b4711688ddf62596b9a768d9fb0",
"enabled": 1
}
},
"sblim": {
"100": {
"checksum": "sha256:2ab2f52e6bac063f176e007b39cd8a4e43012ea075d82af20fbb3403891b6493",
"enabled": 1
}
},
"screen": {
"100": {
"checksum": "sha256:7df09c8fa09e105ecf51fee797975603a2df8d15c3a0bf00fdb1d565fe4a6b91",
"enabled": 1
}
},
"secadm": {
"100": {
"checksum": "sha256:9cf04d33aa9dec0b559c892fb20df89fbe1883544d4ac2d6bf6fc319f0a16663",
"enabled": 1
}
},
"sectoolm": {
"100": {
"checksum": "sha256:e7f9a696e0958d6bdbd6696e67a9b4af62430456d0f278e290db0ea1ee9750b7",
"enabled": 1
}
},
"selinuxutil": {
"100": {
"checksum": "sha256:c72355dc70789deb94777acd0b47c2c3ae628e8d90bffb0e0e320941e5ddf3b7",
"enabled": 1
}
},
"sendmail": {
"100": {
"checksum": "sha256:98f68238d6ca96277390c160adeed4e3e382d5ded5a88a3909cfebe986b849be",
"enabled": 1
}
},
"sensord": {
"100": {
"checksum": "sha256:10ca96a581ef4b0fa1789160fd71fb340d8b1d13906b42fab6e9119033d4f942",
"enabled": 1
}
},
"setrans": {
"100": {
"checksum": "sha256:3a172b4972f9271250b4d228541c78b0243fd0544ac983db0f590e09674f700d",
"enabled": 1
}
},
"setroubleshoot": {
"100": {
"checksum": "sha256:f78edfcb470cd9929f45b6db29ae4924a286ab30a03f80b7bdf3699bccb98314",
"enabled": 1
}
},
"seunshare": {
"100": {
"checksum": "sha256:ba2043d9665e2fd3a9e2d103671bfe647060b93d9c02eed2dca3066a0ecfb81d",
"enabled": 1
}
},
"sge": {
"100": {
"checksum": "sha256:cf843c98ff4113ded675f79df694549b4f848aecb1295f0a510101e301fbd348",
"enabled": 1
}
},
"shorewall": {
"100": {
"checksum": "sha256:c7c49d28e52aba4d168e684b9160a225fbecab373bfbb6963bbe89c93ecb867b",
"enabled": 1
}
},
"slocate": {
"100": {
"checksum": "sha256:be1825562f583305597e5ceb1298ebb60e42c4f270b4a7e3751cf9d9be1b1fac",
"enabled": 1
}
},
"slpd": {
"100": {
"checksum": "sha256:14748519962688e62b7bc7e7c03ad91c1f815c5d33c63f2d60e03340f55609a8",
"enabled": 1
}
},
"smartmon": {
"100": {
"checksum": "sha256:9f26cf1e9fa128e98c758a6325525f8547950a2440b6582202228c3c5c2c80d9",
"enabled": 1
}
},
"smokeping": {
"100": {
"checksum": "sha256:ae8cbd09d519a42bc01063c4c16f58e96cb3673acb557dcd2d09af444d742db1",
"enabled": 1
}
},
"smoltclient": {
"100": {
"checksum": "sha256:8aa5f2749eeaef5ae871dc903dad87611e369c92e9b3fc28b4944f75db785a18",
"enabled": 1
}
},
"smsd": {
"100": {
"checksum": "sha256:d36a762c836a0e4305773e352fe0f46657784b5d9bf749f02df9c6d15f68d101",
"enabled": 1
}
},
"snapper": {
"100": {
"checksum": "sha256:62bba8f6a236bae902815188cedbb5f3090acf0829247e6808787f8c913d9981",
"enabled": 1
}
},
"snmp": {
"100": {
"checksum": "sha256:68b5e9d408704e44ebf29ba76ae18afdcf6d8aef12794e8e9026997376ce12f8",
"enabled": 1
}
},
"snort": {
"100": {
"checksum": "sha256:eef39dec8d416650af3f9eeeb518b06dd9a9e09144aa579b6bd6422ba0037d70",
"enabled": 1
}
},
"sosreport": {
"100": {
"checksum": "sha256:c19dc2ed34c3d274f8e01647dc2d869ca06d4a9a3009f57c1845fac4d33ed358",
"enabled": 1
}
},
"soundserver": {
"100": {
"checksum": "sha256:a46a9508591afb1407fd14441c9c26cd495a3789e3c6792a2eba38a6642e4b97",
"enabled": 1
}
},
"spamassassin": {
"100": {
"checksum": "sha256:8255ad891466762e31763d6f4791a32aa1eea1147a812020724eab8eb07c1916",
"enabled": 1
}
},
"speech-dispatcher": {
"100": {
"checksum": "sha256:ce5ba130d5d0ae5fafe8f823b824856590f990ad7c08aa0a5930f5060c252021",
"enabled": 1
}
},
"squid": {
"100": {
"checksum": "sha256:4170a7354e69ed60e0268389f74042e02a2511a4451ca20b97a63213b8881e1e",
"enabled": 1
}
},
"ssh": {
"100": {
"checksum": "sha256:a4b4b395d2185abfd68edce0f813103ccbedd5d9748f9a41d83cc63dd1465109",
"enabled": 1
}
},
"sslh": {
"100": {
"checksum": "sha256:5b0cc219f31e88f2fa78bc31d9c6fe6c7af29b4832509635672ca9edc79409c6",
"enabled": 1
}
},
"sssd": {
"100": {
"checksum": "sha256:29cd0921e9effe356c856c3319488adf66c794cbb7d1610e5fca2b730b852939",
"enabled": 1
}
},
"staff": {
"100": {
"checksum": "sha256:943b25df416f2181aab46b3492aad9336f60a1b5b46187494f43ab516aae9c6a",
"enabled": 1
}
},
"stapserver": {
"100": {
"checksum": "sha256:788f2eb60a3d902060a6c5a08b086e2a1e96d213f86b206736da7e37eb21e51d",
"enabled": 1
}
},
"stratisd": {
"100": {
"checksum": "sha256:72c10f773d67b4209c39b4bea22e95c66d105f6f13e30f89bcd568eab6c889e3",
"enabled": 1
}
},
"stunnel": {
"100": {
"checksum": "sha256:736a46f682ff77d7c2cf54d5c264eb7b149793c12701b96e9be12bb3e6722796",
"enabled": 1
}
},
"su": {
"100": {
"checksum": "sha256:0cc5796bfe362c3b28c73f62377c029a5f2321078b6d5f90bad42764415cd038",
"enabled": 1
}
},
"sudo": {
"100": {
"checksum": "sha256:d96538a9cbb09fc38ba701cda88b2a0d199ab7826826d0043e4f07b05418bf84",
"enabled": 1
}
},
"svnserve": {
"100": {
"checksum": "sha256:a80606afbcc994e6fdc418cd83182f901d3e5b4b7b36fe262c71a25f43f10af1",
"enabled": 1
}
},
"swift": {
"100": {
"checksum": "sha256:19dfb362a8f445099eac9281522f0b13794cb9a0893a7acf0b54c15d193ef70e",
"enabled": 1
}
},
"sysadm": {
"100": {
"checksum": "sha256:f0e7b74086d47000f8335de5bade5a5a19a5e83bf581f885db92548546b7ea94",
"enabled": 1
}
},
"sysadm_secadm": {
"100": {
"checksum": "sha256:4614737ea0603530691e6158eb1bd07efa1992cb7ef52c201df3a637d3184cdf",
"enabled": 1
}
},
"sysnetwork": {
"100": {
"checksum": "sha256:f6a5a3b49885a9f780c5a9078cc968673809eaf89ecbe170fbb8a1ed4f521ea2",
"enabled": 1
}
},
"sysstat": {
"100": {
"checksum": "sha256:1fadc57b1e46515cbc038e96ae47ab74dd365a910f4d81ec9fb3044c4691260b",
"enabled": 1
}
},
"systemd": {
"100": {
"checksum": "sha256:a5f0e5c340eaf127a166cc50be8170bfce80ccee0c14f32e4cc264089350da1a",
"enabled": 1
}
},
"tangd": {
"100": {
"checksum": "sha256:fd538dbdeba0b4a1c244ba76b8dfef47f61da5a56f24f39fc24c137a9b3b303a",
"enabled": 1
}
},
"targetd": {
"100": {
"checksum": "sha256:bc0f37cdcdd0c9014e89e8be6758f7d9c97c67a4e42652459d6107314f059632",
"enabled": 1
}
},
"tcpd": {
"100": {
"checksum": "sha256:c78dcf2b9abf8d5ccf9f32b2debf6181a935a7078fe4a527991ab11d2999c4a9",
"enabled": 1
}
},
"tcsd": {
"100": {
"checksum": "sha256:e92fb82a2e509e3595d46dd464dac1029ce3a731f117fa67712d119d2878f195",
"enabled": 1
}
},
"telepathy": {
"100": {
"checksum": "sha256:fea41add022251126312da78373cb7fd05df1e9fd27547f1b4fc604a774827a1",
"enabled": 1
}
},
"telnet": {
"100": {
"checksum": "sha256:06d4733c0fc7358d738d4dbf53968c9d9017a72b01456be46633364f00a4207d",
"enabled": 1
}
},
"tftp": {
"100": {
"checksum": "sha256:8ba2497a28f4c2a31177811fc0a091a3bb9814f9e02cfc8d84c004718f661e5f",
"enabled": 1
}
},
"tgtd": {
"100": {
"checksum": "sha256:6ec8d4d38e58efa04572ac713c9148e7182e7d49713ed89955fabdd512b8eea4",
"enabled": 1
}
},
"thin": {
"100": {
"checksum": "sha256:c464da2b8e789d74ea2b2914217a194a3c07081b9f383acd2fee9ab77bc525b5",
"enabled": 1
}
},
"thumb": {
"100": {
"checksum": "sha256:2ce98252c7ff59539bb38204ee65898ba6cc701c3dc87417c11e2e7124f448a3",
"enabled": 1
}
},
"timedatex": {
"100": {
"checksum": "sha256:df36b9f44f28df1b14b4d6bff01de42c414b947a8e6f1e6efdaa7023250709aa",
"enabled": 1
}
},
"tlp": {
"100": {
"checksum": "sha256:7b1d2643c7470dc5b80dee41d18482bb6fd6de55371aba888708a28fe0bb0172",
"enabled": 1
}
},
"tmpreaper": {
"100": {
"checksum": "sha256:2a54cea48dfbeb1c9dad0e167f70aa17970c4f2c76c560330c467051fe3b574b",
"enabled": 1
}
},
"tomcat": {
"100": {
"checksum": "sha256:de3ed9b8d62d29e80e29a051419a648c154c12f6bb188814ca79120ff1dc263b",
"enabled": 1
}
},
"tor": {
"100": {
"checksum": "sha256:16c95ae098af2b964a7a94b5bb6cd1c84d5c7f1254d6411209e4d5cfe87677bc",
"enabled": 1
}
},
"tuned": {
"100": {
"checksum": "sha256:b90ac3a04d3f04c7284f75802ffd69d6c1c3d5c0e6d08c3d0f2d9270b99dd487",
"enabled": 1
}
},
"tvtime": {
"100": {
"checksum": "sha256:8f8a1f1b2fea7a9fb8c3853e02c830f5204f691e9223cbdfbc320ec6914725dc",
"enabled": 1
}
},
"udev": {
"100": {
"checksum": "sha256:24410f1221660b8443af29cb55e42180e268fce722ceed2c99aa202e7dd3cc21",
"enabled": 1
}
},
"ulogd": {
"100": {
"checksum": "sha256:dba41aee81015b99378cff2273a56effd1202c0c937c05c63a913243b0641cdc",
"enabled": 1
}
},
"uml": {
"100": {
"checksum": "sha256:29e7469ef2704943f23c5040531fee8657cfed8440ef44b6268d21e6a9afe309",
"enabled": 1
}
},
"unconfined": {
"100": {
"checksum": "sha256:54482715f4fb5bca5c68ff67b9d145d12ad3df1438db97bcadcc32a2fb0f6191",
"enabled": 1
}
},
"unconfineduser": {
"100": {
"checksum": "sha256:13e69d4cbec7926c0ac6fb796749b4286462add3051f1e94554f23e637b81277",
"enabled": 1
}
},
"unlabelednet": {
"100": {
"checksum": "sha256:cb370bbe8bc0d7bca49a4fd1fad652017f4f8587c7c9d3277155fba32987550e",
"enabled": 1
}
},
"unprivuser": {
"100": {
"checksum": "sha256:bbb2700ca73d867432851e12276a932b1553b034b1cc635f5c6681d6b62dcd3a",
"enabled": 1
}
},
"updfstab": {
"100": {
"checksum": "sha256:57a37a5c07af0f7ad80f4f01173e6cd6b604659e2d1b5605c2719dff8bbaf2fb",
"enabled": 1
}
},
"usbmodules": {
"100": {
"checksum": "sha256:683c0598bdd00543cb696f7ed8cce6b55c658e566141538fc01b3f852af5f697",
"enabled": 1
}
},
"usbmuxd": {
"100": {
"checksum": "sha256:852eb8259277c64b80c91bd1dcbbe85f629e7218ab2f51d39324dcd78a4a278e",
"enabled": 1
}
},
"userdomain": {
"100": {
"checksum": "sha256:066e429e71ebcf11014f4ff6d7647c9d6d88ff191c64eeb9793021d16f4cde97",
"enabled": 1
}
},
"userhelper": {
"100": {
"checksum": "sha256:74b817fb60fd3ed5f074ef8ff399342ddc49fb2c250b08015dc975edd48f4dfd",
"enabled": 1
}
},
"usermanage": {
"100": {
"checksum": "sha256:fa589ab303d10fadd28a3e8d27cc9bc2e55a9b28f28c3f4c7e05968cb00a7cdd",
"enabled": 1
}
},
"usernetctl": {
"100": {
"checksum": "sha256:c5e4e24e89775d797a8988e2d5f72ec7a7dd8387289ede61af7a3ce2173cf167",
"enabled": 1
}
},
"uucp": {
"100": {
"checksum": "sha256:6a3659d3706bc3af4b60e5de7efa9532dcc0c0a6f0c7735ed1300ec2120f9d01",
"enabled": 1
}
},
"uuidd": {
"100": {
"checksum": "sha256:f85ad7d20dd77416ab246ee0837b016a648176ec9956f40ff2ac6b3c2924edc5",
"enabled": 1
}
},
"varnishd": {
"100": {
"checksum": "sha256:18dab548c81b02f1b0f3efd6e25dd529bb0565e974156d55e42e274d3ccdf704",
"enabled": 1
}
},
"vdagent": {
"100": {
"checksum": "sha256:ee8af0b085b727e060ac3c82f1e38c89545505c9b26e849eda22e571064c46e7",
"enabled": 1
}
},
"vhostmd": {
"100": {
"checksum": "sha256:0f7c8c575b060e863fe17e7ee8c67cc5cc3ea31da734a5428dc62c15f3b15bf4",
"enabled": 1
}
},
"virt": {
"100": {
"checksum": "sha256:df433826471b1c65a3686b57b4b07872a695d900731feb88cd6dfb76ddcbc5d9",
"enabled": 1
}
},
"vlock": {
"100": {
"checksum": "sha256:4a9362fc5876897cae7062564d54d7f8ae12413c65c4c7fc6709f6407cc27160",
"enabled": 1
}
},
"vmtools": {
"100": {
"checksum": "sha256:fb9dda20b16232ac253b148063c9b267356b6f2831650f4c00fa01a6d0a8024a",
"enabled": 1
}
},
"vmware": {
"100": {
"checksum": "sha256:d0ce73ebc7d2f494b669257a9a68106245371b455566654c7062694bcbad35df",
"enabled": 1
}
},
"vnstatd": {
"100": {
"checksum": "sha256:1df1aaf42d9c96922226b4828c38b6d315f7a9d3cda60fe54d99be5d618e140d",
"enabled": 1
}
},
"vpn": {
"100": {
"checksum": "sha256:9ea8931bf1c97618b2e99afb8c60a13d51a84db878bffa4082f6973e23b13eb1",
"enabled": 1
}
},
"w3c": {
"100": {
"checksum": "sha256:43663b66ef8275c639a8076d92fc7da6821e0523c120e2c854839f9dc9d1db66",
"enabled": 1
}
},
"watchdog": {
"100": {
"checksum": "sha256:65b78e9b48a6cfe62f6c67c443d3bc667a58d206c09df00870949b6ae7ff8c30",
"enabled": 1
}
},
"wdmd": {
"100": {
"checksum": "sha256:65560477bd0ae271799a76f75c5a3d46ef0c29f6922aa38e727c95b7e1095a99",
"enabled": 1
}
},
"webadm": {
"100": {
"checksum": "sha256:4d4d609b3be3c2dc659694cfd2076e0c0c0d6446d16a3fb054a9e5f951b29410",
"enabled": 1
}
},
"webalizer": {
"100": {
"checksum": "sha256:867139a0cc2cb236ee54575ce6a8568cdbefd6785e8b7f64e09a3041da46b095",
"enabled": 1
}
},
"wine": {
"100": {
"checksum": "sha256:419d697ac987518dee6095070e2894c4112b50256e59d2b4f6acac585fb087f8",
"enabled": 1
}
},
"wireshark": {
"100": {
"checksum": "sha256:ce85b40df4d548aa55eb54bc546943366b654a3af7f602817f1fc499c0c8039e",
"enabled": 1
}
},
"xen": {
"100": {
"checksum": "sha256:f5d46e297e4e8e0a3f76c1fc8ae96db3ebf5b99ab538a54c171e489ac94ae1f0",
"enabled": 1
}
},
"xguest": {
"100": {
"checksum": "sha256:aeb8895098531d1607e389703c783a3c1e8a8c1ad962397debe65214ff86e29e",
"enabled": 1
}
},
"xserver": {
"100": {
"checksum": "sha256:85f1f1ed778597ec568ab7b9069779c088219d1da283a09382439c6803e7863e",
"enabled": 1
}
},
"zabbix": {
"100": {
"checksum": "sha256:476521323be1b84d7ba2539aa208d857678746a76e7e079577d3f46d251637ac",
"enabled": 1
}
},
"zarafa": {
"100": {
"checksum": "sha256:7536116b2852a578cbc5d32f7752b6dd3bb1202817db05306e1a16553c1d43b6",
"enabled": 1
}
},
"zebra": {
"100": {
"checksum": "sha256:3d18bbdc44c396c7715cce348f9248712132a1c53341d3b5760016d245f86e75",
"enabled": 1
}
},
"zoneminder": {
"100": {
"checksum": "sha256:44cf07d7e6b15709d131b8b406032d0e6395a84e1e20bc67f9320a1e97c4dfcc",
"enabled": 1
}
},
"zosremote": {
"100": {
"checksum": "sha256:1177170edbd47b6fe17fa022a247d9b75b1fb3a5a49721bcff3c7da4f480c702",
"enabled": 1
}
}
},
"selinux_priorities": true
},
"changed": false
}
TASK [fedora.linux_system_roles.selinux : Load SELinux modules] ****************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115
Saturday 25 May 2024 08:58:03 +0000 (0:00:03.161) 0:01:09.837 **********
skipping: [sut] => {
"changed": false,
"false_condition": "selinux_modules is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128
Saturday 25 May 2024 08:58:03 +0000 (0:00:00.036) 0:01:09.874 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136
Saturday 25 May 2024 08:58:03 +0000 (0:00:00.016) 0:01:09.890 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Install cluster packages] *********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:44
Saturday 25 May 2024 08:58:03 +0000 (0:00:00.024) 0:01:09.914 **********
changed: [sut] => {
"changed": true,
"rc": 0,
"results": [
"Installed: fence-agents-common-4.2.1-129.el8.noarch",
"Installed: ipmitool-1.8.18-19.el8.x86_64",
"Installed: fence-agents-rsb-4.2.1-129.el8.noarch",
"Installed: python3-ptyprocess-0.5.2-4.el8.noarch",
"Installed: python3-suds-0.7-0.11.94664ddd46a6.el8.noarch",
"Installed: fence-agents-compute-4.2.1-129.el8.noarch",
"Installed: fence-agents-sbd-4.2.1-129.el8.noarch",
"Installed: net-snmp-utils-1:5.8-30.el8.x86_64",
"Installed: fence-agents-drac5-4.2.1-129.el8.noarch",
"Installed: fence-agents-scsi-4.2.1-129.el8.noarch",
"Installed: fence-agents-eaton-snmp-4.2.1-129.el8.noarch",
"Installed: autogen-libopts-5.18.12-8.el8.x86_64",
"Installed: device-mapper-multipath-libs-0.8.4-41.el8.x86_64",
"Installed: userspace-rcu-0.10.1-4.el8.x86_64",
"Installed: fence-agents-emerson-4.2.1-129.el8.noarch",
"Installed: fence-agents-vmware-rest-4.2.1-129.el8.noarch",
"Installed: fence-agents-eps-4.2.1-129.el8.noarch",
"Installed: sbd-1.5.2-2.el8.x86_64",
"Installed: fence-agents-vmware-soap-4.2.1-129.el8.noarch",
"Installed: openwsman-python3-2.6.5-10.el8.x86_64",
"Installed: fence-agents-heuristics-ping-4.2.1-129.el8.noarch",
"Installed: fence-agents-wti-4.2.1-129.el8.noarch",
"Installed: fence-agents-hpblade-4.2.1-129.el8.noarch",
"Installed: fence-virt-1.0.0-6.el8.x86_64",
"Installed: telnet-1:0.17-76.el8.x86_64",
"Installed: fence-agents-ibmblade-4.2.1-129.el8.noarch",
"Installed: fence-agents-ifmib-4.2.1-129.el8.noarch",
"Installed: fence-agents-ilo-moonshot-4.2.1-129.el8.noarch",
"Installed: fence-agents-ilo-mp-4.2.1-129.el8.noarch",
"Installed: fence-agents-ilo-ssh-4.2.1-129.el8.noarch",
"Installed: fence-agents-ilo2-4.2.1-129.el8.noarch",
"Installed: fence-agents-intelmodular-4.2.1-129.el8.noarch",
"Installed: python3-pexpect-4.3.1-3.el8.noarch",
"Installed: fence-agents-ipdu-4.2.1-129.el8.noarch",
"Installed: fence-agents-all-4.2.1-129.el8.x86_64",
"Installed: fence-agents-ipmilan-4.2.1-129.el8.noarch",
"Installed: fence-agents-amt-ws-4.2.1-129.el8.noarch",
"Installed: fence-agents-kdump-4.2.1-129.el8.x86_64",
"Installed: fence-agents-apc-4.2.1-129.el8.noarch",
"Installed: gnutls-dane-3.6.16-8.el8.1.x86_64",
"Installed: fence-agents-apc-snmp-4.2.1-129.el8.noarch",
"Installed: fence-agents-bladecenter-4.2.1-129.el8.noarch",
"Installed: fence-agents-mpath-4.2.1-129.el8.noarch",
"Installed: device-mapper-multipath-0.8.4-41.el8.x86_64",
"Installed: fence-agents-brocade-4.2.1-129.el8.noarch",
"Installed: gnutls-utils-3.6.16-8.el8.1.x86_64",
"Installed: fence-agents-redfish-4.2.1-129.el8.x86_64",
"Installed: fence-agents-cisco-mds-4.2.1-129.el8.noarch",
"Installed: fence-agents-rhevm-4.2.1-129.el8.noarch",
"Installed: libwsman1-2.6.5-10.el8.x86_64",
"Installed: fence-agents-cisco-ucs-4.2.1-129.el8.noarch",
"Installed: fence-agents-rsa-4.2.1-129.el8.noarch"
]
}
lsrpackages: corosync fence-agents-all fence-virt libknet1-plugins-all openssl pacemaker resource-agents
TASK [fedora.linux_system_roles.ha_cluster : Distribute fence-virt authkey] ****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:50
Saturday 25 May 2024 08:58:10 +0000 (0:00:07.135) 0:01:17.050 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Create /etc/cluster directory] ****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:3
Saturday 25 May 2024 08:58:10 +0000 (0:00:00.037) 0:01:17.087 **********
changed: [sut] => {
"changed": true,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/cluster",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Get fence_xvm.key] ****************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:9
Saturday 25 May 2024 08:58:10 +0000 (0:00:00.318) 0:01:17.406 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller fence_xvm.key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16
Saturday 25 May 2024 08:58:10 +0000 (0:00:00.042) 0:01:17.448 **********
skipping: [sut] => {
"changed": false,
"false_condition": "preshared_key_src is string and preshared_key_src | length > 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller fence_xvm.key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21
Saturday 25 May 2024 08:58:10 +0000 (0:00:00.030) 0:01:17.479 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller fence_xvm.key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29
Saturday 25 May 2024 08:58:10 +0000 (0:00:00.029) 0:01:17.508 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL fence_xvm.key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44
Saturday 25 May 2024 08:58:10 +0000 (0:00:00.030) 0:01:17.539 **********
ok: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fetch generated fence_xvm.key] ****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50
Saturday 25 May 2024 08:58:11 +0000 (0:00:00.317) 0:01:17.856 **********
ok: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes fence_xvm.key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60
Saturday 25 May 2024 08:58:11 +0000 (0:00:00.032) 0:01:17.889 **********
ok: [sut] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes fence_xvm.key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65
Saturday 25 May 2024 08:58:11 +0000 (0:00:00.207) 0:01:18.096 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes fence_xvm.key] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73
Saturday 25 May 2024 08:58:11 +0000 (0:00:00.030) 0:01:18.127 **********
skipping: [sut] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Send fence_xvm.key to nodes] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:17
Saturday 25 May 2024 08:58:11 +0000 (0:00:00.040) 0:01:18.167 **********
changed: [sut] => {
"changed": true,
"checksum": "7f8f92dae79ae3d7c6db1f99d18c5b96c12a18d0",
"dest": "/etc/cluster/fence_xvm.key",
"gid": 0,
"group": "root",
"md5sum": "80f8fecbe1e0201a3575c88ac488972d",
"mode": "0600",
"owner": "root",
"secontext": "system_u:object_r:cluster_conf_t:s0",
"size": 512,
"src": "/root/.ansible/tmp/ansible-tmp-1716627491.4731226-11978-126973924485232/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Configure SBD] ********************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:55
Saturday 25 May 2024 08:58:11 +0000 (0:00:00.554) 0:01:18.722 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Configure watchdog kernel module blocklist] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:10
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.054) 0:01:18.777 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Unload watchdog kernel modules from blocklist] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:22
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.026) 0:01:18.803 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure watchdog kernel modules] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:30
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.025) 0:01:18.829 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Load watchdog kernel modules] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:40
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.023) 0:01:18.853 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Probe SBD devices] ****************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:57
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.026) 0:01:18.879 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Initialize SBD devices] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:70
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.024) 0:01:18.903 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Distribute SBD config] ************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:88
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.015) 0:01:18.919 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_sbd_enabled",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Ensure /etc/systemd/system/sbd.service.d directory exists] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:109
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.024) 0:01:18.944 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_sbd_enabled",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Override start timeout for SBD] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:117
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.025) 0:01:18.970 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_sbd_enabled",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Reload systemd service files] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:131
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.024) 0:01:18.994 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_sbd_enabled",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Get services status - detect pacemaker] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:135
Saturday 25 May 2024 08:58:12 +0000 (0:00:00.026) 0:01:19.020 **********
ok: [sut] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"arp-ethers.service": {
"name": "arp-ethers.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"auto-cpufreq.service": {
"name": "auto-cpufreq.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"corosync-notifyd.service": {
"name": "corosync-notifyd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync-qnetd.service": {
"name": "corosync-qnetd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync.service": {
"name": "corosync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crm_mon.service": {
"name": "crm_mon.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fcoe.service": {
"name": "fcoe.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"iscsi-shutdown.service": {
"name": "iscsi-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iscsi.service": {
"name": "iscsi.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iscsid.service": {
"name": "iscsid.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-activation-early.service": {
"name": "lvm2-activation-early.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"lvm2-activation.service": {
"name": "lvm2-activation.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-pvscan@.service": {
"name": "lvm2-pvscan@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"multipathd.service": {
"name": "multipathd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"pacemaker.service": {
"name": "pacemaker.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"pcsd-ruby.service": {
"name": "pcsd-ruby.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"pcsd.service": {
"name": "pcsd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"power-profiles-daemon.service": {
"name": "power-profiles-daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rbdmap.service": {
"name": "rbdmap.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sbd.service": {
"name": "sbd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"sbd_remote.service": {
"name": "sbd_remote.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"snapd.seeded.service": {
"name": "snapd.seeded.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tlp.service": {
"name": "tlp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"yppasswdd.service": {
"name": "yppasswdd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypserv.service": {
"name": "ypserv.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypxfrd.service": {
"name": "ypxfrd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
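The per-unit "state"/"status" pairs above are gathered by Ansible's service_facts module; a minimal way to spot-check a single unit by hand on a systemd host (a sketch, assuming "status" maps to the unit-file state and "state" to the runtime state) is:
    # unit-file state (enabled/disabled/static/indirect/not-found) ~ "status"
    systemctl is-enabled sbd.service
    # runtime state (active/inactive/failed) ~ "state"
    systemctl is-active sbd.service
    # full listing comparable to the facts dump
    systemctl list-units --all --type=service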
TASK [fedora.linux_system_roles.ha_cluster : Set stonith-watchdog-timeout cluster property in CIB] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:142
Saturday 25 May 2024 08:58:13 +0000 (0:00:01.663) 0:01:20.684 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"--force",
"-f",
"/var/lib/pacemaker/cib/cib.xml",
"--",
"property",
"set",
"stonith-watchdog-timeout=0"
],
"delta": "0:00:00.656375",
"end": "2024-05-25 08:58:14.784855",
"rc": 0,
"start": "2024-05-25 08:58:14.128480"
}
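Joined into a single command line, the invocation above amounts to the following; the value can then be read back from the same offline CIB file (a sketch assuming pcs 0.10 syntax):
    pcs --force -f /var/lib/pacemaker/cib/cib.xml -- property set stonith-watchdog-timeout=0
    pcs -f /var/lib/pacemaker/cib/cib.xml property show stonith-watchdog-timeout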
TASK [fedora.linux_system_roles.ha_cluster : Correct cib.xml ownership] ********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:155
Saturday 25 May 2024 08:58:14 +0000 (0:00:00.867) 0:01:21.551 **********
changed: [sut] => {
"changed": true,
"gid": 189,
"group": "haclient",
"mode": "0600",
"owner": "hacluster",
"path": "/var/lib/pacemaker/cib/cib.xml",
"secontext": "unconfined_u:object_r:cluster_var_lib_t:s0",
"size": 412,
"state": "file",
"uid": 189
}
TASK [fedora.linux_system_roles.ha_cluster : Clean cib.xml.sig] ****************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:167
Saturday 25 May 2024 08:58:15 +0000 (0:00:00.223) 0:01:21.774 **********
ok: [sut] => {
"changed": false,
"path": "/var/lib/pacemaker/cib/cib.xml.sig",
"state": "absent"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure corosync] ***************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:58
Saturday 25 May 2024 08:58:15 +0000 (0:00:00.214) 0:01:21.989 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf tempfile] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:3
Saturday 25 May 2024 08:58:15 +0000 (0:00:00.044) 0:01:22.033 **********
changed: [sut] => {
"changed": true,
"gid": 0,
"group": "root",
"mode": "0600",
"owner": "root",
"path": "/tmp/ansible.bii_jwtu_ha_cluster_corosync_conf",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 0,
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf file content] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:16
Saturday 25 May 2024 08:58:15 +0000 (0:00:00.290) 0:01:22.324 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf file content using pcs-0.10] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:3
Saturday 25 May 2024 08:58:15 +0000 (0:00:00.063) 0:01:22.387 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"cluster",
"setup",
"--corosync_conf",
"/tmp/ansible.bii_jwtu_ha_cluster_corosync_conf",
"--overwrite",
"--no-cluster-uuid",
"--",
"test-cluster",
"localhost"
],
"delta": "0:00:00.543808",
"end": "2024-05-25 08:58:16.398490",
"rc": 0,
"start": "2024-05-25 08:58:15.854682"
}
STDOUT:
Warning: Unable to read the known-hosts file: No such file or directory: '/var/lib/pcsd/known-hosts'
No addresses specified for host 'localhost', using 'localhost'
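Joined into one line, the setup command above is the following; with --corosync_conf, pcs only writes the generated configuration to the given file instead of touching the live cluster (sketch assuming pcs 0.10):
    pcs cluster setup --corosync_conf /tmp/ansible.bii_jwtu_ha_cluster_corosync_conf \
        --overwrite --no-cluster-uuid -- test-cluster localhost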
TASK [fedora.linux_system_roles.ha_cluster : Add qdevice configuration to corosync.conf] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:81
Saturday 25 May 2024 08:58:16 +0000 (0:00:00.779) 0:01:23.167 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_qdevice_in_use",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Fetch created corosync.conf file] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:19
Saturday 25 May 2024 08:58:16 +0000 (0:00:00.024) 0:01:23.191 **********
ok: [sut] => {
"changed": false,
"content": "dG90ZW0gewogICAgdmVyc2lvbjogMgogICAgY2x1c3Rlcl9uYW1lOiB0ZXN0LWNsdXN0ZXIKICAgIHRyYW5zcG9ydDoga25ldAogICAgY3J5cHRvX2NpcGhlcjogYWVzMjU2CiAgICBjcnlwdG9faGFzaDogc2hhMjU2Cn0KCm5vZGVsaXN0IHsKICAgIG5vZGUgewogICAgICAgIHJpbmcwX2FkZHI6IGxvY2FsaG9zdAogICAgICAgIG5hbWU6IGxvY2FsaG9zdAogICAgICAgIG5vZGVpZDogMQogICAgfQp9CgpxdW9ydW0gewogICAgcHJvdmlkZXI6IGNvcm9zeW5jX3ZvdGVxdW9ydW0KfQoKbG9nZ2luZyB7CiAgICB0b19sb2dmaWxlOiB5ZXMKICAgIGxvZ2ZpbGU6IC92YXIvbG9nL2NsdXN0ZXIvY29yb3N5bmMubG9nCiAgICB0b19zeXNsb2c6IHllcwogICAgdGltZXN0YW1wOiBvbgp9Cg==",
"encoding": "base64",
"source": "/tmp/ansible.bii_jwtu_ha_cluster_corosync_conf"
}
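Decoding the base64 payload above (for example with `base64 -d`) shows the corosync.conf generated for the single-node test cluster:
    totem {
        version: 2
        cluster_name: test-cluster
        transport: knet
        crypto_cipher: aes256
        crypto_hash: sha256
    }

    nodelist {
        node {
            ring0_addr: localhost
            name: localhost
            nodeid: 1
        }
    }

    quorum {
        provider: corosync_votequorum
    }

    logging {
        to_logfile: yes
        logfile: /var/log/cluster/corosync.log
        to_syslog: yes
        timestamp: on
    }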
TASK [fedora.linux_system_roles.ha_cluster : Distribute corosync.conf file] ****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:26
Saturday 25 May 2024 08:58:16 +0000 (0:00:00.265) 0:01:23.457 **********
changed: [sut] => {
"changed": true,
"checksum": "05d2ec2a2bfa233bb2b4ace4aae02b42cafc012b",
"dest": "/etc/corosync/corosync.conf",
"gid": 0,
"group": "root",
"md5sum": "beb73759420421ade7d3b0d2f8dd24ef",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 388,
"src": "/root/.ansible/tmp/ansible-tmp-1716627496.7658691-12015-101917471645725/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Remove a corosync.conf tempfile] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:36
Saturday 25 May 2024 08:58:17 +0000 (0:00:00.530) 0:01:23.987 **********
changed: [sut] => {
"changed": true,
"path": "/tmp/ansible.bii_jwtu_ha_cluster_corosync_conf",
"state": "absent"
}
TASK [fedora.linux_system_roles.ha_cluster : Cluster auth] *********************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:61
Saturday 25 May 2024 08:58:17 +0000 (0:00:00.218) 0:01:24.206 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Check pcs auth status] ************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:3
Saturday 25 May 2024 08:58:17 +0000 (0:00:00.042) 0:01:24.248 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"status",
"pcsd",
"--",
"localhost"
],
"delta": "0:00:00.560430",
"end": "2024-05-25 08:58:18.260466",
"failed_when_result": false,
"rc": 2,
"start": "2024-05-25 08:58:17.700036"
}
STDOUT:
Warning: Unable to read the known-hosts file: No such file or directory: '/var/lib/pcsd/known-hosts'
localhost: Unable to authenticate
MSG:
non-zero return code
TASK [fedora.linux_system_roles.ha_cluster : Run pcs auth] *********************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:23
Saturday 25 May 2024 08:58:18 +0000 (0:00:00.778) 0:01:25.027 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Pcs auth using pcs-0.10] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:3
Saturday 25 May 2024 08:58:18 +0000 (0:00:00.036) 0:01:25.063 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"host",
"auth",
"-u",
"hacluster",
"--",
"localhost"
],
"delta": "0:00:00.964577",
"end": "2024-05-25 08:58:19.494470",
"rc": 0,
"start": "2024-05-25 08:58:18.529893"
}
STDOUT:
Password: localhost: Authorized
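The role supplies the hacluster password on stdin, which is why the prompt and result are fused into "Password: localhost: Authorized" above; an equivalent manual call would look roughly like this (a sketch, pcs 0.10 syntax; <password> is a placeholder):
    # interactive: pcs prompts for the hacluster password
    pcs host auth -u hacluster -- localhost
    # non-interactive variant (password becomes visible in the process list)
    # pcs host auth -u hacluster -p <password> -- localhost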
TASK [fedora.linux_system_roles.ha_cluster : Pcs auth for qdevice using pcs-0.10] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:19
Saturday 25 May 2024 08:58:19 +0000 (0:00:01.197) 0:01:26.261 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_qdevice_model == \"net\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Distribute cluster shared keys] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:66
Saturday 25 May 2024 08:58:19 +0000 (0:00:00.025) 0:01:26.286 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Get corosync authkey] *************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:3
Saturday 25 May 2024 08:58:19 +0000 (0:00:00.045) 0:01:26.332 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16
Saturday 25 May 2024 08:58:19 +0000 (0:00:00.040) 0:01:26.373 **********
skipping: [sut] => {
"changed": false,
"false_condition": "preshared_key_src is string and preshared_key_src | length > 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21
Saturday 25 May 2024 08:58:19 +0000 (0:00:00.030) 0:01:26.403 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29
Saturday 25 May 2024 08:58:19 +0000 (0:00:00.029) 0:01:26.432 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44
Saturday 25 May 2024 08:58:19 +0000 (0:00:00.031) 0:01:26.464 **********
ok: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fetch generated corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50
Saturday 25 May 2024 08:58:19 +0000 (0:00:00.217) 0:01:26.681 **********
ok: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.063) 0:01:26.744 **********
ok: [sut] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.210) 0:01:26.955 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes corosync authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.030) 0:01:26.986 **********
skipping: [sut] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Distribute corosync authkey] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:11
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.042) 0:01:27.028 **********
changed: [sut] => {
"changed": true,
"checksum": "6080700aa70db8426bf6f4c85084ef3509084204",
"dest": "/etc/corosync/authkey",
"gid": 0,
"group": "root",
"md5sum": "5b1bdead6c1630d235fbaffa488e61cf",
"mode": "0400",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 256,
"src": "/root/.ansible/tmp/ansible-tmp-1716627500.3318195-12045-50271173160531/source",
"state": "file",
"uid": 0
}
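The 256-byte corosync authkey is generated on the controller ("Generate key using OpenSSL" above, hidden by no_log) and copied to /etc/corosync/authkey as root:root with mode 0400. A manual equivalent would be roughly (a sketch; the exact openssl invocation used by the role is not shown in the log):
    # 256 random bytes, matching the size reported above
    openssl rand -out /etc/corosync/authkey 256
    chown root:root /etc/corosync/authkey && chmod 0400 /etc/corosync/authkey
    # or use the corosync-provided generator
    corosync-keygen -k /etc/corosync/authkey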
TASK [fedora.linux_system_roles.ha_cluster : Get pacemaker authkey] ************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:20
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.527) 0:01:27.555 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.043) 0:01:27.599 **********
skipping: [sut] => {
"changed": false,
"false_condition": "preshared_key_src is string and preshared_key_src | length > 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.030) 0:01:27.629 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.030) 0:01:27.660 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44
Saturday 25 May 2024 08:58:20 +0000 (0:00:00.030) 0:01:27.690 **********
ok: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Fetch generated pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50
Saturday 25 May 2024 08:58:21 +0000 (0:00:00.216) 0:01:27.906 **********
ok: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60
Saturday 25 May 2024 08:58:21 +0000 (0:00:00.032) 0:01:27.939 **********
ok: [sut] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65
Saturday 25 May 2024 08:58:21 +0000 (0:00:00.213) 0:01:28.152 **********
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes pacemaker authkey] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73
Saturday 25 May 2024 08:58:21 +0000 (0:00:00.030) 0:01:28.183 **********
skipping: [sut] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
skipping: [sut] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Distribute pacemaker authkey] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:28
Saturday 25 May 2024 08:58:21 +0000 (0:00:00.043) 0:01:28.226 **********
changed: [sut] => {
"changed": true,
"checksum": "76500352bc2ca701494b301f73374ce9739630e0",
"dest": "/etc/pacemaker/authkey",
"gid": 189,
"group": "haclient",
"md5sum": "cdbc6b48c40cbf6cf892641aa0403d71",
"mode": "0400",
"owner": "hacluster",
"secontext": "system_u:object_r:etc_t:s0",
"size": 256,
"src": "/root/.ansible/tmp/ansible-tmp-1716627501.5307622-12065-78447861455938/source",
"state": "file",
"uid": 189
}
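The pacemaker authkey follows the same pattern but lands in /etc/pacemaker/authkey owned by hacluster:haclient with mode 0400, as reported above; a manual sketch:
    openssl rand -out /etc/pacemaker/authkey 256
    chown hacluster:haclient /etc/pacemaker/authkey && chmod 0400 /etc/pacemaker/authkey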
TASK [fedora.linux_system_roles.ha_cluster : Remove qdevice certificates [CLI]] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:37
Saturday 25 May 2024 08:58:22 +0000 (0:00:00.529) 0:01:28.756 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"--",
"qdevice",
"net-client",
"destroy"
],
"delta": "0:00:00.540473",
"end": "2024-05-25 08:58:22.741550",
"rc": 0,
"start": "2024-05-25 08:58:22.201077"
}
TASK [fedora.linux_system_roles.ha_cluster : Remove qdevice certificates [API]] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:46
Saturday 25 May 2024 08:58:22 +0000 (0:00:00.753) 0:01:29.509 **********
skipping: [sut] => {
"changed": false,
"false_condition": "'corosync.quorum.device.client.model.net.certificates.rest-api.v2' in __ha_cluster_pcsd_capabilities",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Obtain and distribute qdevice certificates [CLI]] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:54
Saturday 25 May 2024 08:58:22 +0000 (0:00:00.025) 0:01:29.534 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_qdevice_in_use",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Obtain and distribute qdevice certificates [API]] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:71
Saturday 25 May 2024 08:58:22 +0000 (0:00:00.025) 0:01:29.560 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_qdevice_in_use",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Enable or disable cluster services on boot] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:72
Saturday 25 May 2024 08:58:22 +0000 (0:00:00.024) 0:01:29.584 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Enable or disable configured cluster services on boot] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:3
Saturday 25 May 2024 08:58:22 +0000 (0:00:00.045) 0:01:29.630 **********
changed: [sut] => (item=corosync) => {
"ansible_loop_var": "item",
"changed": true,
"enabled": true,
"item": "corosync",
"name": "corosync",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "network-online.target sysinit.target systemd-journald.socket basic.target system.slice",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Corosync Cluster Engine",
"DevicePolicy": "auto",
"Documentation": "man:corosync man:corosync.conf man:corosync_overview",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/corosync (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/corosync.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "corosync.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "corosync.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "network-online.target sysinit.target system.slice",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
skipping: [sut] => (item=corosync-qdevice) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "item != 'corosync-qdevice' or __ha_cluster_qdevice_in_use",
"item": "corosync-qdevice",
"skip_reason": "Conditional result was False"
}
changed: [sut] => (item=pacemaker) => {
"ansible_loop_var": "item",
"changed": true,
"enabled": true,
"item": "pacemaker",
"name": "pacemaker",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "corosync.service time-sync.target dbus.service rsyslog.service network.target sysinit.target basic.target systemd-journald.socket resource-agents-deps.target system.slice",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Pacemaker High Availability Cluster Manager",
"DevicePolicy": "auto",
"Documentation": "man:pacemakerd https://clusterlabs.org/pacemaker/doc/",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/pacemaker.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "pacemaker.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "process",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "pacemaker.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice sysinit.target corosync.service",
"Restart": "on-failure",
"RestartUSec": "1s",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "25s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "infinity",
"TimeoutStartUSec": "1min",
"TimeoutStopUSec": "30min",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "simple",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "dbus.service resource-agents-deps.target",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
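With corosync-qdevice skipped (no qdevice configured), the loop above enables only corosync and pacemaker on boot; the manual equivalent is:
    systemctl enable corosync.service pacemaker.service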
TASK [fedora.linux_system_roles.ha_cluster : Get services status - detect SBD] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:16
Saturday 25 May 2024 08:58:23 +0000 (0:00:01.025) 0:01:30.655 **********
ok: [sut] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"arp-ethers.service": {
"name": "arp-ethers.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"auto-cpufreq.service": {
"name": "auto-cpufreq.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"corosync-notifyd.service": {
"name": "corosync-notifyd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync-qnetd.service": {
"name": "corosync-qnetd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync.service": {
"name": "corosync.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crm_mon.service": {
"name": "crm_mon.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fcoe.service": {
"name": "fcoe.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"iscsi-shutdown.service": {
"name": "iscsi-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iscsi.service": {
"name": "iscsi.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iscsid.service": {
"name": "iscsid.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-activation-early.service": {
"name": "lvm2-activation-early.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"lvm2-activation.service": {
"name": "lvm2-activation.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-pvscan@.service": {
"name": "lvm2-pvscan@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"multipathd.service": {
"name": "multipathd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"pacemaker.service": {
"name": "pacemaker.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"pcsd-ruby.service": {
"name": "pcsd-ruby.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"pcsd.service": {
"name": "pcsd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"power-profiles-daemon.service": {
"name": "power-profiles-daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rbdmap.service": {
"name": "rbdmap.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sbd.service": {
"name": "sbd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"sbd_remote.service": {
"name": "sbd_remote.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"snapd.seeded.service": {
"name": "snapd.seeded.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tlp.service": {
"name": "tlp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"yppasswdd.service": {
"name": "yppasswdd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypserv.service": {
"name": "ypserv.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypxfrd.service": {
"name": "ypxfrd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
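Note: the service inventory above has the shape produced by Ansible's service_facts module, which the role queries before deciding how to manage individual cluster daemons. A minimal, illustrative sketch of gathering and reading such a fact (task names are assumptions, not taken from the role's task files):

    - name: Get services status              # hypothetical task name
      ansible.builtin.service_facts:

    - name: Report whether sbd is installed  # hypothetical follow-up
      ansible.builtin.debug:
        msg: "{{ 'sbd.service' in ansible_facts.services }}"

Each entry in ansible_facts.services carries the name, source, state and status fields seen in the output above.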
TASK [fedora.linux_system_roles.ha_cluster : Enable or disable SBD] ************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:19
Saturday 25 May 2024 08:58:25 +0000 (0:00:01.640) 0:01:32.296 **********
ok: [sut] => {
"changed": false,
"enabled": false,
"name": "sbd",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice basic.target systemd-modules-load.service systemd-journald.socket iscsi.service sysinit.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target pacemaker.service dlm.service",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "no",
"CanStop": "no",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Shared-storage based fencing daemon",
"DevicePolicy": "auto",
"Documentation": "man:sbd(8)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/sbd ; argv[]=/usr/sbin/sbd $SBD_OPTS -p /run/sbd.pid watch ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/kill ; argv[]=/usr/bin/kill -TERM $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/sbd.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "sbd.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "sbd.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PIDFile": "/run/sbd.pid",
"PartOf": "corosync.service",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "yes",
"RefuseManualStop": "yes",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice sysinit.target",
"Restart": "on-abort",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "forking",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
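Note: the "Enable or disable SBD" result above reports enabled: false for the sbd unit, i.e. SBD stays disabled for this run, and "changed": false means the unit was already in the requested state. A hedged sketch of how such a toggle is commonly written with the stock systemd module (the boolean variable name here is illustrative, not read from the role's source):

    - name: Enable or disable SBD            # illustrative sketch
      ansible.builtin.systemd:
        name: sbd
        enabled: "{{ ha_cluster_sbd_enabled | default(false) }}"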
TASK [fedora.linux_system_roles.ha_cluster : Start the cluster and reload corosync.conf] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:75
Saturday 25 May 2024 08:58:25 +0000 (0:00:00.366) 0:01:32.662 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Get services status - detect corosync-qdevice] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:21
Saturday 25 May 2024 08:58:25 +0000 (0:00:00.049) 0:01:32.712 **********
ok: [sut] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"arp-ethers.service": {
"name": "arp-ethers.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"auto-cpufreq.service": {
"name": "auto-cpufreq.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"chrony-dnssrv@.service": {
"name": "chrony-dnssrv@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"corosync-notifyd.service": {
"name": "corosync-notifyd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync-qnetd.service": {
"name": "corosync-qnetd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"corosync.service": {
"name": "corosync.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cpupower.service": {
"name": "cpupower.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"crm_mon.service": {
"name": "crm_mon.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"crond.service": {
"name": "crond.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-makecache.service": {
"name": "dnf-makecache.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fcoe.service": {
"name": "fcoe.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"halt-local.service": {
"name": "halt-local.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"import-state.service": {
"name": "import-state.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iprdump.service": {
"name": "iprdump.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprinit.service": {
"name": "iprinit.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"iprupdate.service": {
"name": "iprupdate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"irqbalance.service": {
"name": "irqbalance.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"iscsi-shutdown.service": {
"name": "iscsi-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iscsi.service": {
"name": "iscsi.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iscsid.service": {
"name": "iscsid.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kdump.service": {
"name": "kdump.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"kvm_stat.service": {
"name": "kvm_stat.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"loadmodules.service": {
"name": "loadmodules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-activation-early.service": {
"name": "lvm2-activation-early.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"lvm2-activation.service": {
"name": "lvm2-activation.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"lvm2-pvscan@.service": {
"name": "lvm2-pvscan@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"messagebus.service": {
"name": "messagebus.service",
"source": "systemd",
"state": "active",
"status": "static"
},
"microcode.service": {
"name": "microcode.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"multipathd.service": {
"name": "multipathd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-convert.service": {
"name": "nfs-convert.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"oddjobd.service": {
"name": "oddjobd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"pacemaker.service": {
"name": "pacemaker.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"pcsd-ruby.service": {
"name": "pcsd-ruby.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"pcsd.service": {
"name": "pcsd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"power-profiles-daemon.service": {
"name": "power-profiles-daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"qemu-guest-agent.service": {
"name": "qemu-guest-agent.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"quotaon.service": {
"name": "quotaon.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"rbdmap.service": {
"name": "rbdmap.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rdisc.service": {
"name": "rdisc.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rsyslog.service": {
"name": "rsyslog.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sbd.service": {
"name": "sbd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"sbd_remote.service": {
"name": "sbd_remote.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"snapd.seeded.service": {
"name": "snapd.seeded.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "active",
"status": "enabled"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-resume@.service": {
"name": "systemd-hibernate-resume@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck.service": {
"name": "systemd-quotacheck.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "masked"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"tcsd.service": {
"name": "tcsd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"teamd@.service": {
"name": "teamd@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"timedatex.service": {
"name": "timedatex.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"tlp.service": {
"name": "tlp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"tuned.service": {
"name": "tuned.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"ypbind.service": {
"name": "ypbind.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"yppasswdd.service": {
"name": "yppasswdd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypserv.service": {
"name": "ypserv.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ypxfrd.service": {
"name": "ypxfrd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
}
}
},
"changed": false
}
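Note: this second service scan detects whether corosync-qdevice exists on the node before the cluster daemons are cycled; the next task's skip condition checks for 'corosync-qdevice.service' in ansible_facts.services and skips that loop item when the unit is absent, as seen below. A minimal sketch of the pattern (a sketch only, not the role's actual task file):

    - name: Stop cluster daemons to reload configuration   # sketch
      ansible.builtin.systemd:
        name: "{{ item }}"
        state: stopped
      loop:
        - pacemaker
        - corosync-qdevice
        - corosync
      when: >-
        item != 'corosync-qdevice'
        or 'corosync-qdevice.service' in ansible_facts.services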
TASK [fedora.linux_system_roles.ha_cluster : Stop cluster daemons to reload configuration] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:24
Saturday 25 May 2024 08:58:27 +0000 (0:00:01.638) 0:01:34.350 **********
ok: [sut] => (item=pacemaker) => {
"ansible_loop_var": "item",
"changed": false,
"item": "pacemaker",
"name": "pacemaker",
"state": "stopped",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "dbus.service corosync.service system.slice network.target resource-agents-deps.target sysinit.target rsyslog.service time-sync.target systemd-journald.socket basic.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Pacemaker High Availability Cluster Manager",
"DevicePolicy": "auto",
"Documentation": "man:pacemakerd https://clusterlabs.org/pacemaker/doc/",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/pacemaker.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "pacemaker.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "process",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "pacemaker.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice corosync.service sysinit.target",
"Restart": "on-failure",
"RestartUSec": "1s",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "25s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "infinity",
"TimeoutStartUSec": "1min",
"TimeoutStopUSec": "30min",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "simple",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "resource-agents-deps.target dbus.service",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
skipping: [sut] => (item=corosync-qdevice) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "item != 'corosync-qdevice' or 'corosync-qdevice.service' in ansible_facts.services\n",
"item": "corosync-qdevice",
"skip_reason": "Conditional result was False"
}
ok: [sut] => (item=corosync) => {
"ansible_loop_var": "item",
"changed": false,
"item": "corosync",
"name": "corosync",
"state": "stopped",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice basic.target systemd-journald.socket network-online.target sysinit.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "pacemaker.service shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Corosync Cluster Engine",
"DevicePolicy": "auto",
"Documentation": "man:corosync man:corosync.conf man:corosync_overview",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/corosync (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/corosync.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "corosync.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "corosync.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"RequiredBy": "pacemaker.service",
"Requires": "system.slice sysinit.target network-online.target",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-05-25 08:58:23 UTC",
"StateChangeTimestampMonotonic": "1702897287",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.ha_cluster : Start corosync] *******************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:47
Saturday 25 May 2024 08:58:28 +0000 (0:00:00.693) 0:01:35.044 **********
changed: [sut] => {
"changed": true,
"name": "corosync",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice basic.target systemd-journald.socket network-online.target sysinit.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "pacemaker.service shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Corosync Cluster Engine",
"DevicePolicy": "auto",
"Documentation": "man:corosync man:corosync.conf man:corosync_overview",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/corosync (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/corosync.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "corosync.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "corosync.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"RequiredBy": "pacemaker.service",
"Requires": "system.slice sysinit.target network-online.target",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Sat 2024-05-25 08:58:23 UTC",
"StateChangeTimestampMonotonic": "1702897287",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.ha_cluster : Reload corosync configuration] ****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:56
Saturday 25 May 2024 08:58:29 +0000 (0:00:00.764) 0:01:35.809 **********
ok: [sut] => {
"changed": false,
"cmd": [
"corosync-cfgtool",
"-R"
],
"delta": "0:00:00.013763",
"end": "2024-05-25 08:58:29.265545",
"rc": 0,
"start": "2024-05-25 08:58:29.251782"
}
STDOUT:
Reloading corosync.conf...
Done
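For reference, corosync-cfgtool -R (as invoked above) only asks every corosync instance in the cluster to re-read corosync.conf; the daemon is not restarted and membership is preserved. Run by hand on any cluster node it is simply:

    # tell all corosync instances to reload corosync.conf (no restart, membership kept)
    corosync-cfgtool -R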
TASK [fedora.linux_system_roles.ha_cluster : Start corosync-qdevice] ***********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:63
Saturday 25 May 2024 08:58:29 +0000 (0:00:00.226) 0:01:36.036 **********
skipping: [sut] => {
"changed": false,
"false_condition": "__ha_cluster_qdevice_in_use",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Start pacemaker] ******************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:69
Saturday 25 May 2024 08:58:29 +0000 (0:00:00.025) 0:01:36.062 **********
changed: [sut] => {
"changed": true,
"name": "pacemaker",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "dbus.service corosync.service system.slice network.target resource-agents-deps.target sysinit.target rsyslog.service time-sync.target systemd-journald.socket basic.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Pacemaker High Availability Cluster Manager",
"DevicePolicy": "auto",
"Documentation": "man:pacemakerd https://clusterlabs.org/pacemaker/doc/",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/pacemaker.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "pacemaker.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "process",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "pacemaker.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "main",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice corosync.service sysinit.target",
"Restart": "on-failure",
"RestartUSec": "1s",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "25s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "infinity",
"TimeoutStartUSec": "1min",
"TimeoutStopUSec": "30min",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "simple",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "resource-agents-deps.target dbus.service",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.ha_cluster : Wait for the cluster to fully start and form membership] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:75
Saturday 25 May 2024 08:58:29 +0000 (0:00:00.372) 0:01:36.434 **********
ok: [sut] => {
"changed": false,
"cmd": [
"pcs",
"cluster",
"start",
"--all",
"--wait"
],
"delta": "0:00:26.431429",
"end": "2024-05-25 08:58:56.340923",
"rc": 0,
"start": "2024-05-25 08:58:29.909494"
}
STDOUT:
localhost: Starting Cluster...
Waiting for node(s) to start...
localhost: Started
TASK [fedora.linux_system_roles.ha_cluster : List pacemaker nodes] *************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:89
Saturday 25 May 2024 08:58:56 +0000 (0:00:26.674) 0:02:03.108 **********
ok: [sut] => {
"changed": false,
"cmd": "set -euo pipefail; crm_mon -X | xmllint --xpath '/crm_mon/nodes/node/@name' - | sed -E 's/\\s*name=\"([^\"]+)\"\\s*/\\1\\n/g'\n",
"delta": "0:00:00.067303",
"end": "2024-05-25 08:58:56.615802",
"rc": 0,
"start": "2024-05-25 08:58:56.548499"
}
STDOUT:
localhost
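The node list above comes from pacemaker's own status XML: crm_mon -X prints the cluster status as XML, xmllint extracts the name attributes of the node elements, and sed unwraps them into one hostname per line. The same pipeline, runnable on any cluster node:

    # list pacemaker node names, one per line (same command the role runs above)
    set -euo pipefail
    crm_mon -X \
      | xmllint --xpath '/crm_mon/nodes/node/@name' - \
      | sed -E 's/\s*name="([^"]+)"\s*/\1\n/g'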
TASK [fedora.linux_system_roles.ha_cluster : Purge removed nodes from pacemaker's cache] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:111
Saturday 25 May 2024 08:58:56 +0000 (0:00:00.272) 0:02:03.381 **********
skipping: [sut] => (item=localhost) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "item not in __ha_cluster_all_node_names",
"item": "localhost",
"skip_reason": "Conditional result was False"
}
skipping: [sut] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.ha_cluster : Create and push CIB] **************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:78
Saturday 25 May 2024 08:58:56 +0000 (0:00:00.029) 0:02:03.411 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Create a tempfile for original CIB] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:10
Saturday 25 May 2024 08:58:56 +0000 (0:00:00.062) 0:02:03.473 **********
changed: [sut] => {
"changed": true,
"gid": 0,
"group": "root",
"mode": "0600",
"owner": "root",
"path": "/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 0,
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Create a tempfile for new CIB] ****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:18
Saturday 25 May 2024 08:58:56 +0000 (0:00:00.206) 0:02:03.679 **********
changed: [sut] => {
"changed": true,
"gid": 0,
"group": "root",
"mode": "0600",
"owner": "root",
"path": "/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 0,
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Fetch CIB configuration] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:26
Saturday 25 May 2024 08:58:57 +0000 (0:00:00.236) 0:02:03.916 **********
ok: [sut] => {
"changed": false,
"cmd": [
"cibadmin",
"--query"
],
"delta": "0:00:00.011564",
"end": "2024-05-25 08:58:57.371804",
"rc": 0,
"start": "2024-05-25 08:58:57.360240"
}
STDOUT:
TASK [fedora.linux_system_roles.ha_cluster : Write CIB configuration] **********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:33
Saturday 25 May 2024 08:58:57 +0000 (0:00:00.221) 0:02:04.138 **********
changed: [sut] => (item=/tmp/ansible.00xlj_78_ha_cluster_cib_xml) => {
"ansible_loop_var": "item",
"changed": true,
"checksum": "3e19ca25b089f9debcfd1eeee7671a8b8fcee225",
"dest": "/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"gid": 0,
"group": "root",
"item": "/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"md5sum": "945a218d603127a4294a5932200e7d8c",
"mode": "0600",
"owner": "root",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 1507,
"src": "/root/.ansible/tmp/ansible-tmp-1716627537.4427683-12112-185598599422835/source",
"state": "file",
"uid": 0
}
changed: [sut] => (item=/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml) => {
"ansible_loop_var": "item",
"changed": true,
"checksum": "3e19ca25b089f9debcfd1eeee7671a8b8fcee225",
"dest": "/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml",
"gid": 0,
"group": "root",
"item": "/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml",
"md5sum": "945a218d603127a4294a5932200e7d8c",
"mode": "0600",
"owner": "root",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 1507,
"src": "/root/.ansible/tmp/ansible-tmp-1716627537.9584954-12112-1028779300159/source",
"state": "file",
"uid": 0
}
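Both tempfiles receive the same cibadmin --query output (note the identical checksums above): the role keeps one untouched snapshot of the live CIB and applies all subsequent pcs -f edits to the other copy, so it can later diff the two and push only the delta. A rough manual equivalent, with hypothetical paths:

    # snapshot the live CIB twice; edit only the "new" copy from here on
    cibadmin --query > /tmp/original_cib.xml
    cp /tmp/original_cib.xml /tmp/new_cib.xml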
TASK [fedora.linux_system_roles.ha_cluster : Purge new CIB configuration] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:51
Saturday 25 May 2024 08:58:58 +0000 (0:00:01.049) 0:02:05.187 **********
changed: [sut] => {
"changed": true,
"cmd": [
"cibadmin",
"--force",
"--delete-all",
"--xpath",
"/cib/configuration/*[not(\n self::crm_config or\n self::nodes or\n self::resources or\n self::constraints\n)] | /cib/configuration/*[self::resources or self::constraints]/* | /cib/configuration/nodes/*/* | /cib/configuration/crm_config//nvpair[not(\n @name=\"cluster-infrastructure\" or\n @name=\"cluster-name\" or\n @name=\"dc-version\" or\n @name=\"have-watchdog\" or\n @name=\"last-lrm-refresh\" or\n @name=\"stonith-watchdog-timeout\"\n)]"
],
"delta": "0:00:00.032294",
"end": "2024-05-25 08:58:58.660605",
"rc": 0,
"start": "2024-05-25 08:58:58.628311"
}
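This purge resets the working copy to a minimal configuration: the XPath deletes everything under /cib/configuration except the crm_config, nodes, resources and constraints containers, and it spares only the handful of cluster properties the role must not manage (cluster-name, dc-version, have-watchdog and the like), so the following tasks can rebuild the configuration purely from the role variables. The log does not show the task environment, but the usual way to aim cibadmin at the temp copy instead of the live cluster is the CIB_file variable, roughly:

    # assumption: point cibadmin at the offline copy via CIB_file (path hypothetical)
    CIB_file=/tmp/new_cib.xml cibadmin --force --delete-all \
        --xpath '/cib/configuration/resources/*'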
TASK [fedora.linux_system_roles.ha_cluster : Configure cluster properties] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:85
Saturday 25 May 2024 08:58:58 +0000 (0:00:00.239) 0:02:05.427 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-properties.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Configure cluster properties set] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-properties.yml:3
Saturday 25 May 2024 08:58:58 +0000 (0:00:00.042) 0:02:05.469 **********
changed: [sut] => (item={'name': 'placement-strategy', 'value': 'utilization'}) => {
"ansible_loop_var": "item",
"changed": true,
"cmd": [
"pcs",
"-f",
"/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"--",
"property",
"set",
"placement-strategy=utilization"
],
"delta": "0:00:00.645665",
"end": "2024-05-25 08:58:59.558169",
"item": {
"name": "placement-strategy",
"value": "utilization"
},
"rc": 0,
"start": "2024-05-25 08:58:58.912504"
}
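pcs -f applies the change to the offline CIB file rather than the running cluster, and placement-strategy=utilization switches the pacemaker scheduler from its default strategy (which ignores utilization attributes) to one that places resources according to declared node capacity and resource usage. Stand-alone, with the hypothetical path from above:

    # set the property in the offline copy; drop -f to change a live cluster directly
    pcs -f /tmp/new_cib.xml -- property set placement-strategy=utilization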
TASK [fedora.linux_system_roles.ha_cluster : Configure node attributes] ********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:95
Saturday 25 May 2024 08:58:59 +0000 (0:00:00.857) 0:02:06.327 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure node utilization] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:102
Saturday 25 May 2024 08:58:59 +0000 (0:00:00.028) 0:02:06.355 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-node-utilization.yml for sut => (item={'node_name': 'localhost', 'utilization': [{'attrs': [{'name': 'cpu', 'value': 2}, {'name': 'memory', 'value': 4096}]}]})
TASK [fedora.linux_system_roles.ha_cluster : Configure utilizations for node localhost] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-node-utilization.yml:3
Saturday 25 May 2024 08:58:59 +0000 (0:00:00.046) 0:02:06.402 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"-f",
"/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"--",
"node",
"utilization",
"localhost",
"cpu=2",
"memory=4096"
],
"delta": "0:00:00.669370",
"end": "2024-05-25 08:59:00.518362",
"rc": 0,
"start": "2024-05-25 08:58:59.848992"
}
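Node capacity is declared per node; localhost is given cpu=2 and memory=4096, which is exactly what the verification tasks at the end of the play expect pcs node utilization to report. Equivalent stand-alone commands:

    # declare node capacity in the offline copy, then show what is currently set
    pcs -f /tmp/new_cib.xml -- node utilization localhost cpu=2 memory=4096
    pcs -f /tmp/new_cib.xml -- node utilization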
TASK [fedora.linux_system_roles.ha_cluster : Configure resource defaults] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:115
Saturday 25 May 2024 08:59:00 +0000 (0:00:00.884) 0:02:07.287 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource operation defaults] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:127
Saturday 25 May 2024 08:59:00 +0000 (0:00:00.024) 0:02:07.311 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Extract primitive to bundle mapping] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:137
Saturday 25 May 2024 08:59:00 +0000 (0:00:00.024) 0:02:07.336 **********
ok: [sut] => {
"ansible_facts": {
"__ha_cluster_primitive_bundle_map": {}
},
"changed": false
}
TASK [fedora.linux_system_roles.ha_cluster : Configure cluster bundle resources] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:141
Saturday 25 May 2024 08:59:00 +0000 (0:00:00.029) 0:02:07.366 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure cluster resources] ******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:147
Saturday 25 May 2024 08:59:00 +0000 (0:00:00.015) 0:02:07.382 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-resource-primitive.yml for sut => (item={'id': 'resource1', 'agent': 'ocf:pacemaker:Dummy', 'utilization': [{'attrs': [{'name': 'cpu', 'value': 1}]}, {'attrs': [{'name': 'memory', 'value': 1024}]}]})
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-resource-primitive.yml for sut => (item={'id': 'resource2', 'agent': 'ocf:pacemaker:Dummy'})
TASK [fedora.linux_system_roles.ha_cluster : Configure resource primitive resource1] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-resource-primitive.yml:3
Saturday 25 May 2024 08:59:00 +0000 (0:00:00.046) 0:02:07.428 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"-f",
"/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"--",
"resource",
"create",
"resource1",
"ocf:pacemaker:Dummy"
],
"delta": "0:00:00.642220",
"end": "2024-05-25 08:59:01.524684",
"rc": 0,
"start": "2024-05-25 08:59:00.882464"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure utilization for resource primitive resource1] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-resource-primitive.yml:61
Saturday 25 May 2024 08:59:01 +0000 (0:00:00.866) 0:02:08.294 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"-f",
"/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"--",
"resource",
"utilization",
"resource1",
"cpu=1"
],
"delta": "0:00:00.664996",
"end": "2024-05-25 08:59:02.408501",
"rc": 0,
"start": "2024-05-25 08:59:01.743505"
}
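Both test resources use ocf:pacemaker:Dummy, a no-op agent meant for exactly this kind of testing, and resource1 is then given a utilization attribute (cpu=1) that the utilization placement strategy weighs against the node capacity declared above. Stand-alone:

    # create a dummy resource and declare how much capacity it consumes
    pcs -f /tmp/new_cib.xml -- resource create resource1 ocf:pacemaker:Dummy
    pcs -f /tmp/new_cib.xml -- resource utilization resource1 cpu=1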
TASK [fedora.linux_system_roles.ha_cluster : Configure resource primitive resource2] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-resource-primitive.yml:3
Saturday 25 May 2024 08:59:02 +0000 (0:00:00.883) 0:02:09.178 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"-f",
"/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"--",
"resource",
"create",
"resource2",
"ocf:pacemaker:Dummy"
],
"delta": "0:00:00.643064",
"end": "2024-05-25 08:59:03.276780",
"rc": 0,
"start": "2024-05-25 08:59:02.633716"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure utilization for resource primitive resource2] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-resource-primitive.yml:61
Saturday 25 May 2024 08:59:03 +0000 (0:00:00.866) 0:02:10.044 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"-f",
"/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"--",
"resource",
"utilization",
"resource2"
],
"delta": "0:00:00.653153",
"end": "2024-05-25 08:59:04.141607",
"rc": 0,
"start": "2024-05-25 08:59:03.488454"
}
STDOUT:
Resource Utilization:
resource2:
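resource2 was passed to the role without any utilization values, so this task ends up invoking pcs resource utilization resource2 with no name=value pairs; in that form pcs merely prints the resource's current (empty) utilization, which is the two STDOUT lines above. For example:

    # with no name=value pairs this only displays current utilization, it sets nothing
    pcs -f /tmp/new_cib.xml -- resource utilization resource2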
TASK [fedora.linux_system_roles.ha_cluster : Configure cluster resource groups] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:154
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.864) 0:02:10.909 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure cluster resource clones] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:160
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.015) 0:02:10.924 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure stonith levels] *********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:167
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.049) 0:02:10.974 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource location constraints] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:175
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.016) 0:02:10.991 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource colocation constraints] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:182
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.016) 0:02:11.007 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource set colocation constraints] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:190
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.015) 0:02:11.022 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource order constraints] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:200
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.015) 0:02:11.038 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource set order constraints] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:208
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.015) 0:02:11.053 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource ticket constraints] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:218
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.015) 0:02:11.069 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure resource set ticket constraints] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:226
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.015) 0:02:11.085 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure acls] *******************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:236
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.016) 0:02:11.101 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Configure ACL roles] **************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml:3
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.042) 0:02:11.143 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure ACL users] **************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml:28
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.025) 0:02:11.168 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure ACL groups] *************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml:44
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.026) 0:02:11.195 **********
skipping: [sut] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.ha_cluster : Create a tempfile for CIB diff] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:243
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.024) 0:02:11.220 **********
changed: [sut] => {
"changed": true,
"gid": 0,
"group": "root",
"mode": "0600",
"owner": "root",
"path": "/tmp/ansible.yz7wd2ua_ha_cluster_cib_diff",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 0,
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Compare new and original CIB] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:251
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.207) 0:02:11.427 **********
changed: [sut] => {
"changed": true,
"cmd": [
"crm_diff",
"--no-version",
"--original",
"/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml",
"--new",
"/tmp/ansible.00xlj_78_ha_cluster_cib_xml"
],
"delta": "0:00:00.006493",
"end": "2024-05-25 08:59:04.874993",
"failed_when_result": false,
"rc": 1,
"start": "2024-05-25 08:59:04.868500"
}
STDOUT:
MSG:
non-zero return code
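The rc=1 here is expected rather than an error: crm_diff exits with status 1 when the two CIB snapshots differ, the role tolerates that (failed_when_result: false) and uses it as the signal that there is something to push. The diff is then written to a third tempfile and applied to the live cluster as a patch, which is what the next two tasks do. The overall pattern, with the hypothetical paths used earlier:

    # diff the untouched snapshot against the edited copy; exit status 1 means "they differ"
    crm_diff --no-version --original /tmp/original_cib.xml --new /tmp/new_cib.xml > /tmp/cib.diff
    if [ $? -eq 1 ]; then
        # apply only the delta to the running cluster
        cibadmin --verbose --patch --xml-file /tmp/cib.diff
    fi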
TASK [fedora.linux_system_roles.ha_cluster : Write CIB diff to its tempfile] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:264
Saturday 25 May 2024 08:59:04 +0000 (0:00:00.215) 0:02:11.642 **********
changed: [sut] => {
"changed": true,
"checksum": "179b46639843446f35743877c978fe0145dc1d71",
"dest": "/tmp/ansible.yz7wd2ua_ha_cluster_cib_diff",
"gid": 0,
"group": "root",
"md5sum": "8072fe81ef6a1b3e652897c077c0b864",
"mode": "0600",
"owner": "root",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 2503,
"src": "/root/.ansible/tmp/ansible-tmp-1716627544.9528847-12169-160255948438132/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.ha_cluster : Push CIB diff to the cluster if it has any changes] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:275
Saturday 25 May 2024 08:59:05 +0000 (0:00:00.548) 0:02:12.191 **********
changed: [sut] => {
"changed": true,
"cmd": [
"cibadmin",
"--verbose",
"--patch",
"--xml-file",
"/tmp/ansible.yz7wd2ua_ha_cluster_cib_diff"
],
"delta": "0:00:00.018319",
"end": "2024-05-25 08:59:05.657561",
"rc": 0,
"start": "2024-05-25 08:59:05.639242"
}
TASK [fedora.linux_system_roles.ha_cluster : Remove CIB tempfiles] *************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:284
Saturday 25 May 2024 08:59:05 +0000 (0:00:00.252) 0:02:12.444 **********
changed: [sut] => (item={'changed': True, 'path': '/tmp/ansible.00xlj_78_ha_cluster_cib_xml', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0600', 'state': 'file', 'secontext': 'unconfined_u:object_r:user_tmp_t:s0', 'size': 0, 'failed': False}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"changed": true,
"failed": false,
"gid": 0,
"group": "root",
"mode": "0600",
"owner": "root",
"path": "/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 0,
"state": "file",
"uid": 0
},
"path": "/tmp/ansible.00xlj_78_ha_cluster_cib_xml",
"state": "absent"
}
changed: [sut] => (item={'changed': True, 'path': '/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0600', 'state': 'file', 'secontext': 'unconfined_u:object_r:user_tmp_t:s0', 'size': 0, 'failed': False}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"changed": true,
"failed": false,
"gid": 0,
"group": "root",
"mode": "0600",
"owner": "root",
"path": "/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 0,
"state": "file",
"uid": 0
},
"path": "/tmp/ansible.wntemx2w_ha_cluster_original_cib_xml",
"state": "absent"
}
changed: [sut] => (item={'changed': True, 'path': '/tmp/ansible.yz7wd2ua_ha_cluster_cib_diff', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0600', 'state': 'file', 'secontext': 'unconfined_u:object_r:user_tmp_t:s0', 'size': 0, 'failed': False}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"changed": true,
"failed": false,
"gid": 0,
"group": "root",
"mode": "0600",
"owner": "root",
"path": "/tmp/ansible.yz7wd2ua_ha_cluster_cib_diff",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 0,
"state": "file",
"uid": 0
},
"path": "/tmp/ansible.yz7wd2ua_ha_cluster_cib_diff",
"state": "absent"
}
TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] *****
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:87
Saturday 25 May 2024 08:59:06 +0000 (0:00:00.589) 0:02:13.034 **********
skipping: [sut] => {
"changed": false,
"false_condition": "not ha_cluster_cluster_present",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Remove fence-virt authkey] ********
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:90
Saturday 25 May 2024 08:59:06 +0000 (0:00:00.016) 0:02:13.050 **********
skipping: [sut] => {
"changed": false,
"false_condition": "not ha_cluster_cluster_present",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Configure qnetd] ******************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:95
Saturday 25 May 2024 08:59:06 +0000 (0:00:00.016) 0:02:13.067 **********
included: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml for sut
TASK [fedora.linux_system_roles.ha_cluster : Remove qnetd configuration] *******
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:3
Saturday 25 May 2024 08:59:06 +0000 (0:00:00.065) 0:02:13.133 **********
changed: [sut] => {
"changed": true,
"cmd": [
"pcs",
"--force",
"--",
"qdevice",
"destroy",
"net"
],
"delta": "0:00:01.095541",
"end": "2024-05-25 08:59:07.671857",
"rc": 0,
"start": "2024-05-25 08:59:06.576316"
}
STDOUT:
Stopping quorum device...
quorum device stopped
quorum device disabled
Quorum device 'net' configuration files removed
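Since ha_cluster_qnetd is not enabled in this test (the next two tasks are skipped for exactly that reason), the role's qnetd handling on this host reduces to removing any leftover quorum device provider configuration, which is what the command above does:

    # remove any existing qnetd ("net" model) configuration from this host
    pcs --force -- qdevice destroy net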
TASK [fedora.linux_system_roles.ha_cluster : Setup qnetd] **********************
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:16
Saturday 25 May 2024 08:59:07 +0000 (0:00:01.309) 0:02:14.442 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_qnetd.present | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.ha_cluster : Enable or disable qnetd service on boot] ***
task path: /WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:26
Saturday 25 May 2024 08:59:07 +0000 (0:00:00.026) 0:02:14.468 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_qnetd.present | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [Fetch node utilization configuration from the cluster] *******************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:71
Saturday 25 May 2024 08:59:07 +0000 (0:00:00.033) 0:02:14.501 **********
ok: [sut] => {
"changed": false,
"cmd": [
"pcs",
"node",
"utilization"
],
"delta": "0:00:00.618356",
"end": "2024-05-25 08:59:08.554801",
"rc": 0,
"start": "2024-05-25 08:59:07.936445"
}
STDOUT:
Node Utilization:
localhost: cpu=2 memory=4096
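From here on the play is pure verification: it reads the configuration back from the live cluster and asserts that the command output matches the values pushed earlier. A quick manual spot-check on the node would be:

    # expected after this test run (see the assertions below)
    pcs node utilization        # localhost: cpu=2 memory=4096
    pcs resource utilization    # resource1: cpu=1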
TASK [Print real node utilization configuration] *******************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:77
Saturday 25 May 2024 08:59:08 +0000 (0:00:00.819) 0:02:15.321 **********
ok: [sut] => {
"__test_pcs_node_utilization_config": {
"changed": false,
"cmd": [
"pcs",
"node",
"utilization"
],
"delta": "0:00:00.618356",
"end": "2024-05-25 08:59:08.554801",
"failed": false,
"msg": "",
"rc": 0,
"start": "2024-05-25 08:59:07.936445",
"stderr": "",
"stderr_lines": [],
"stdout": "Node Utilization:\n localhost: cpu=2 memory=4096",
"stdout_lines": [
"Node Utilization:",
" localhost: cpu=2 memory=4096"
]
}
}
TASK [Print expected node utilization configuration] ***************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:81
Saturday 25 May 2024 08:59:08 +0000 (0:00:00.020) 0:02:15.342 **********
ok: [sut] => {
"__test_expected_lines | list": [
"Node Utilization:",
" localhost: cpu=2 memory=4096"
]
}
TASK [Check node utilization configuration] ************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:85
Saturday 25 May 2024 08:59:08 +0000 (0:00:00.029) 0:02:15.371 **********
ok: [sut] => {
"changed": false
}
MSG:
All assertions passed
TASK [Fetch resource utilization configuration from the cluster] ***************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:96
Saturday 25 May 2024 08:59:08 +0000 (0:00:00.028) 0:02:15.400 **********
ok: [sut] => {
"changed": false,
"cmd": [
"pcs",
"resource",
"utilization"
],
"delta": "0:00:00.621368",
"end": "2024-05-25 08:59:09.455643",
"rc": 0,
"start": "2024-05-25 08:59:08.834275"
}
STDOUT:
Resource Utilization:
resource1: cpu=1
TASK [Print real resource utilization configuration] ***************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:102
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.822) 0:02:16.223 **********
ok: [sut] => {
"__test_pcs_resource_utilization_config": {
"changed": false,
"cmd": [
"pcs",
"resource",
"utilization"
],
"delta": "0:00:00.621368",
"end": "2024-05-25 08:59:09.455643",
"failed": false,
"msg": "",
"rc": 0,
"start": "2024-05-25 08:59:08.834275",
"stderr": "",
"stderr_lines": [],
"stdout": "Resource Utilization:\n resource1: cpu=1",
"stdout_lines": [
"Resource Utilization:",
" resource1: cpu=1"
]
}
}
TASK [Print expected resource utilization configuration] ***********************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:106
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.019) 0:02:16.243 **********
ok: [sut] => {
"__test_expected_lines | list": [
"Resource Utilization:",
" resource1: cpu=1"
]
}
TASK [Check resource utilization configuration] ********************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:110
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.028) 0:02:16.271 **********
ok: [sut] => {
"changed": false
}
MSG:
All assertions passed
TASK [Check firewall and selinux state] ****************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tests_cib_utilization.yml:115
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.028) 0:02:16.299 **********
included: /WORKDIR/git-weekly-ci1ebenttp/tests/tasks/check_firewall_selinux.yml for sut
TASK [Check firewall service status] *******************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tasks/check_firewall_selinux.yml:6
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.031) 0:02:16.331 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_manage_firewall | bool",
"skip_reason": "Conditional result was False"
}
TASK [Check firewall port status] **********************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tasks/check_firewall_selinux.yml:12
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.025) 0:02:16.356 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_manage_firewall | bool",
"skip_reason": "Conditional result was False"
}
TASK [Get associated selinux ports] ********************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tasks/check_firewall_selinux.yml:25
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.025) 0:02:16.382 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_manage_selinux | bool",
"skip_reason": "Conditional result was False"
}
TASK [Check associated selinux ports] ******************************************
task path: /WORKDIR/git-weekly-ci1ebenttp/tests/tasks/check_firewall_selinux.yml:33
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.030) 0:02:16.412 **********
skipping: [sut] => {
"changed": false,
"false_condition": "ha_cluster_manage_selinux | bool",
"skip_reason": "Conditional result was False"
}
PLAY RECAP *********************************************************************
sut : ok=134 changed=41 unreachable=0 failed=0 skipped=111 rescued=0 ignored=0
Saturday 25 May 2024 08:59:09 +0000 (0:00:00.085) 0:02:16.498 **********
===============================================================================
fedora.linux_system_roles.ha_cluster : Wait for the cluster to fully start and form membership -- 26.67s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:75
fedora.linux_system_roles.selinux : Set an SELinux label on a port ----- 22.76s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87
fedora.linux_system_roles.ha_cluster : Install role essential packages -- 21.02s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11
fedora.linux_system_roles.ha_cluster : Install cluster packages --------- 7.14s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:44
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 3.28s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:45
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.16s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 2.47s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:23
fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.43s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:19
fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot --- 2.00s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88
fedora.linux_system_roles.ha_cluster : Populate service facts ----------- 1.76s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3
fedora.linux_system_roles.ha_cluster : Get services status - detect pacemaker --- 1.66s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:135
fedora.linux_system_roles.ha_cluster : Get services status - detect SBD --- 1.64s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:16
fedora.linux_system_roles.ha_cluster : Get services status - detect corosync-qdevice --- 1.64s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:21
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.52s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
fedora.linux_system_roles.ha_cluster : Remove qnetd configuration ------- 1.31s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:3
fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities ----------- 1.27s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141
fedora.linux_system_roles.ha_cluster : Pcs auth using pcs-0.10 ---------- 1.20s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:3
fedora.linux_system_roles.ha_cluster : Write CIB configuration ---------- 1.05s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:33
fedora.linux_system_roles.ha_cluster : Enable or disable configured cluster services on boot --- 1.03s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:3
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 0.95s
/WORKDIR/git-weekly-ci1ebenttp/.collection/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
---^---^---^---^---^---
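The per-task timestamps and the ranked duration list that close the STDOUT section are the kind of output produced by a task-profiling callback such as ansible.posix.profile_tasks; whether this CI job enables exactly that callback is not visible here. A minimal, assumed ansible.cfg fragment for enabling it would look like this:

    [defaults]
    # Assumed configuration; the actual CI ansible.cfg is not part of this log.
    callbacks_enabled = ansible.posix.profile_tasks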
# STDERR:
---v---v---v---v---v---
[DEPRECATION WARNING]: ANSIBLE_COLLECTIONS_PATHS option, does not fit var
naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead. This
feature will be removed from ansible-core in version 2.19. Deprecation warnings
can be disabled by setting deprecation_warnings=False in ansible.cfg.
[DEPRECATION WARNING]: Encryption using the Python crypt module is deprecated.
The Python crypt module is deprecated and will be removed from Python 3.13.
Install the passlib library for continued encryption functionality. This
feature will be removed in version 2.17. Deprecation warnings can be disabled
by setting deprecation_warnings=False in ansible.cfg.
---^---^---^---^---^---
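Both STDERR entries are deprecation notices from ansible-core 2.16, not test failures. The remediation is the one the warnings themselves describe: move to the singular ANSIBLE_COLLECTIONS_PATH / collections_path setting before ansible-core 2.19, and install passlib (for example, pip install passlib) so password hashing no longer relies on the Python crypt module, which is being removed in Python 3.13. The fragment below is an illustrative ansible.cfg sketch; the collections path is simply the value observed in this run, and deprecation_warnings is left commented out because hiding the warnings does not remove the underlying deprecations.

    [defaults]
    # Singular form; ANSIBLE_COLLECTIONS_PATHS is deprecated and is removed in ansible-core 2.19.
    collections_path = /WORKDIR/git-weekly-ci1ebenttp/.collection
    # Optional, as suggested by the warning text itself:
    # deprecation_warnings = False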