ansible-playbook [core 2.17.6]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-o6g
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.7 (main, Oct 1 2024, 00:00:00) [GCC 13.3.1 20240913 (Red Hat 13.3.1-3)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_qnetd.yml ******************************************************
2 plays in /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:5
Monday 18 November 2024 10:09:02 -0500 (0:00:00.014) 0:00:00.014 *******
ok: [managed-node1] => {
    "ansible_facts": {
        "ha_cluster_hacluster_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n31303833633366333561656439323930303361333161363239346166656537323933313436\n3432386236656563343237306335323637396239616230353561330a313731623238393238\n62343064666336643930663239383936616465643134646536656532323461356237646133\n3761616633323839633232353637366266350a313163633236376666653238633435306565\n3264623032333736393535663833\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/ha_cluster-TFQ/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Test qnetd setup] ********************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:9
Monday 18 November 2024 10:09:02 -0500 (0:00:00.042) 0:00:00.056 *******
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node1]

TASK [Set up test environment] *************************************************
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:22
Monday 18 November 2024 10:09:04 -0500 (0:00:01.607) 0:00:01.664 *******
included: fedora.linux_system_roles.ha_cluster for managed-node1

TASK [fedora.linux_system_roles.ha_cluster : Set node name to 'localhost' for single-node clusters] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:9
Monday 18 November 2024 10:09:04 -0500 (0:00:00.055) 0:00:01.720 *******
ok: [managed-node1] => {
    "ansible_facts": {
        "inventory_hostname": "localhost"
    },
    "changed": false
}

TASK [fedora.linux_system_roles.ha_cluster : Ensure facts used by tests] *******
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:14
Monday 18 November 2024 10:09:04 -0500 (0:00:00.043) 0:00:01.763 *******
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "'distribution' not in ansible_facts",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ********
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:22
Monday 18 November 2024 10:09:04 -0500 (0:00:00.019) 0:00:01.783 *******
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:27
Monday 18 November 2024 10:09:04 -0500 (0:00:00.536) 0:00:02.320 *******
ok: [managed-node1] => {
    "ansible_facts": {
        "__ha_cluster_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.ha_cluster : Do not try to enable RHEL repositories] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:32
Monday 18 November 2024 10:09:05 -0500 (0:00:00.027) 0:00:02.347 *******
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "ansible_distribution == 'RedHat'",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.ha_cluster : Copy nss-altfiles ha_cluster users to /etc/passwd] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:41
Monday 18 November 2024 10:09:05 -0500 (0:00:00.018) 0:00:02.366 *******
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__ha_cluster_is_ostree | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up test environment for qnetd] *************************************
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:27
Monday 18 November 2024 10:09:05 -0500 (0:00:00.044) 0:00:02.410 *******
included: fedora.linux_system_roles.ha_cluster for managed-node1

TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9
Monday 18 November 2024 10:09:05 -0500 (0:00:00.036) 0:00:02.447 *******
ok: [managed-node1] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd config files are not present] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:16
Monday 18 November 2024 10:09:07 -0500 (0:00:02.233) 0:00:04.680 *******
ok: [managed-node1] => {
    "changed": false,
    "path": "/etc/corosync/qnetd",
    "state": "absent"
}

TASK [Run HA Cluster role] *****************************************************
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:32
Monday 18 November 2024 10:09:08 -0500 (0:00:00.644) 0:00:05.324 *******
included: fedora.linux_system_roles.ha_cluster for managed-node1

TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:3
Monday 18 November 2024 10:09:08 -0500 (0:00:00.085) 0:00:05.409 *******
included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.ha_cluster : Ensure ansible_facts used by role] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:2
Monday 18 November 2024 10:09:08 -0500 (0:00:00.036) 0:00:05.446 *******
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__ha_cluster_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ********
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:10
Monday 18 November 2024 10:09:08 -0500 (0:00:00.049) 0:00:05.495 *******
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __ha_cluster_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:15
Monday 18 November 2024 10:09:08 -0500 (0:00:00.038) 0:00:05.534 *******
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "not __ha_cluster_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] ***
task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:19
Monday 18 November 2024 10:09:08 -0500 (0:00:00.039) 0:00:05.573 *******
ok: [managed-node1] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__ha_cluster_cloud_agents_packages": [],
        "__ha_cluster_fence_agent_packages_default": "{{ ['fence-agents-all'] + (['fence-virt'] if ansible_architecture == 'x86_64' else []) }}",
        "__ha_cluster_fullstack_node_packages": [
            "corosync",
            "libknet1-plugins-all",
            "resource-agents",
            "pacemaker"
        ],
        "__ha_cluster_pcs_provider": "pcs-0.10",
        "__ha_cluster_qdevice_node_packages": [
            "corosync-qdevice",
            "bash",
            "coreutils",
            "curl",
            "grep",
            "nss-tools",
            "openssl",
            "sed"
        ],
        "__ha_cluster_repos": [],
        "__ha_cluster_role_essential_packages": [
            "pcs",
            "corosync-qnetd",
            "openssl"
        ],
        "__ha_cluster_sbd_packages": [
            "sbd"
        ],
        "__ha_cluster_services": [
            "corosync",
            "corosync-qdevice",
            "pacemaker"
        ]
    },
    "ansible_included_var_files": [
"/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node1] => (item=Fedora.yml) => { "ansible_facts": { "__ha_cluster_cloud_agents_packages": [] }, "ansible_included_var_files": [ "/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node1] => (item=Fedora_39.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_39.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=Fedora_39.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_39.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set Linux Pacemaker shell specific variables] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:34 Monday 18 November 2024 10:09:08 -0500 (0:00:00.091) 0:00:05.664 ******* ok: [managed-node1] => { "ansible_facts": {}, "ansible_included_var_files": [ "/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/shell_pcs.yml" ], "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Enable package repositories] ****** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:6 Monday 18 November 2024 10:09:08 -0500 (0:00:00.037) 0:00:05.702 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml for managed-node1 TASK [fedora.linux_system_roles.ha_cluster : Find platform/version specific tasks to enable repositories] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:3 Monday 18 November 2024 10:09:08 -0500 (0:00:00.034) 0:00:05.737 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/RedHat.yml" }, "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [managed-node1] => (item=Fedora.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/Fedora.yml" }, "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [managed-node1] => (item=Fedora_39.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "Fedora_39.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=Fedora_39.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "Fedora_39.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Run platform/version specific tasks to enable repositories] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:21 Monday 18 November 2024 10:09:08 -0500 (0:00:00.062) 0:00:05.799 
******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/Fedora.yml for managed-node1 TASK [fedora.linux_system_roles.ha_cluster : Install role essential packages] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11 Monday 18 November 2024 10:09:08 -0500 (0:00:00.054) 0:00:05.853 ******* changed: [managed-node1] => { "changed": true, "rc": 0, "results": [ "Installed: corosync-qnetd-3.0.3-3.fc39.x86_64" ] } TASK [fedora.linux_system_roles.ha_cluster : Check and prepare role variables] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:17 Monday 18 November 2024 10:09:12 -0500 (0:00:03.669) 0:00:09.522 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml for managed-node1 TASK [fedora.linux_system_roles.ha_cluster : Discover cluster node names] ****** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:3 Monday 18 November 2024 10:09:12 -0500 (0:00:00.070) 0:00:09.593 ******* ok: [managed-node1] => { "ansible_facts": { "__ha_cluster_node_name": "localhost" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Collect cluster node names] ******* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:7 Monday 18 November 2024 10:09:12 -0500 (0:00:00.051) 0:00:09.645 ******* ok: [managed-node1] => { "ansible_facts": { "__ha_cluster_all_node_names": [ "localhost" ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_node_options contains unknown or duplicate nodes] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:16 Monday 18 November 2024 10:09:12 -0500 (0:00:00.065) 0:00:09.710 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "(\n __nodes_from_options != (__nodes_from_options | unique)\n) or (\n __nodes_from_options | difference(__ha_cluster_all_node_names)\n)\n", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Extract node options] ************* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:30 Monday 18 November 2024 10:09:12 -0500 (0:00:00.068) 0:00:09.778 ******* ok: [managed-node1] => { "ansible_facts": { "__ha_cluster_local_node": {} }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:43 Monday 18 November 2024 10:09:12 -0500 (0:00:00.080) 0:00:09.859 ******* skipping: [managed-node1] => (item=ha_cluster_hacluster_password) => { "ansible_loop_var": "item", "changed": false, "false_condition": "lookup(\"vars\", item, default=\"\") | string | length < 1", "item": "ha_cluster_hacluster_password", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Fail if nodes do not have the same 
number of SBD devices specified] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:53 Monday 18 November 2024 10:09:12 -0500 (0:00:00.115) 0:00:09.974 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if configuring qnetd on a cluster node] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:69 Monday 18 November 2024 10:09:12 -0500 (0:00:00.030) 0:00:10.004 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if no valid level is specified for a fencing level] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:79 Monday 18 November 2024 10:09:12 -0500 (0:00:00.022) 0:00:10.027 ******* skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Fail if no target is specified for a fencing level] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:87 Monday 18 November 2024 10:09:12 -0500 (0:00:00.018) 0:00:10.045 ******* skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Extract qdevice settings] ********* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:101 Monday 18 November 2024 10:09:12 -0500 (0:00:00.020) 0:00:10.065 ******* ok: [managed-node1] => { "ansible_facts": { "__ha_cluster_qdevice_host": "", "__ha_cluster_qdevice_in_use": false, "__ha_cluster_qdevice_model": "", "__ha_cluster_qdevice_pcs_address": "" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Figure out if ATB needs to be enabled for SBD] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:110 Monday 18 November 2024 10:09:12 -0500 (0:00:00.061) 0:00:10.127 ******* ok: [managed-node1] => { "ansible_facts": { "__ha_cluster_sbd_needs_atb": false }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if SBD needs ATB enabled and the user configured ATB to be disabled] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:120 Monday 18 November 2024 10:09:12 -0500 (0:00:00.056) 0:00:10.183 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__ha_cluster_sbd_needs_atb | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_pcsd_public_key_src and ha_cluster_pcsd_private_key_src are set along with ha_cluster_pcsd_certificates] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:127 Monday 18 November 2024 10:09:12 -0500 (0:00:00.042) 0:00:10.226 ******* 
skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is not none", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141 Monday 18 November 2024 10:09:12 -0500 (0:00:00.025) 0:00:10.251 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "pcs", "--version", "--full" ], "delta": "0:00:00.840242", "end": "2024-11-18 10:09:14.273938", "rc": 0, "start": "2024-11-18 10:09:13.433696" } STDOUT: 0.11.8 booth cluster.config.backup-local cluster.config.restore-cluster cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.local cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all cluster.report cluster.verify corosync.authkey.update corosync.config.get corosync.config.get.struct corosync.config.reload corosync.config.sync-to-local-cluster corosync.config.update corosync.link.add corosync.link.remove corosync.link.remove.list corosync.link.update corosync.qdevice corosync.qdevice.model.net corosync.quorum corosync.quorum.device corosync.quorum.device.client.model.net.certificates.local corosync.quorum.device.heuristics corosync.quorum.device.model.net corosync.quorum.device.model.net.options_tls_and_kaptb corosync.quorum.set-expected-votes-runtime corosync.quorum.status corosync.quorum.unblock corosync.totem.block_unlisted_ips corosync.uidgid node.add node.add.enable node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.attributes.set-list-for-node node.confirm-off node.fence node.guest node.kill node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remote node.remote.onfail-demote node.remove node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.list node.start-stop-enable-disable.start-wait node.utilization node.utilization.set-list-for-node pcmk.acl.enable-disable pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.user pcmk.alert pcmk.cib.checkpoints pcmk.cib.checkpoints.diff pcmk.cib.edit pcmk.cib.get pcmk.cib.get.scope pcmk.cib.roles.promoted-unpromoted pcmk.cib.set pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.colocation.simple.options pcmk.constraint.config.output-formats pcmk.constraint.hide-expired pcmk.constraint.location.simple pcmk.constraint.location.simple.options pcmk.constraint.location.simple.resource-regexp pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.location.simple.rule.options pcmk.constraint.location.simple.rule.rule-add-remove pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.order.simple.options pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster 
pcmk.properties.cluster.config.output-formats pcmk.properties.cluster.defaults pcmk.properties.cluster.describe pcmk.properties.cluster.describe.output-formats pcmk.properties.cluster.set_properties.iso8601duration pcmk.properties.operation-defaults pcmk.properties.operation-defaults.config.output-formats pcmk.properties.operation-defaults.multiple pcmk.properties.operation-defaults.rule pcmk.properties.operation-defaults.rule-rsc-op pcmk.properties.operation-defaults.rule.hide-expired pcmk.properties.operation-defaults.rule.node-attr-type-number pcmk.properties.resource-defaults pcmk.properties.resource-defaults.config.output-formats pcmk.properties.resource-defaults.multiple pcmk.properties.resource-defaults.rule pcmk.properties.resource-defaults.rule-rsc-op pcmk.properties.resource-defaults.rule.hide-expired pcmk.properties.resource-defaults.rule.node-attr-type-number pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.bundles pcmk.resource.ban-move-clear.clear-expired pcmk.resource.ban-move-clear.clone pcmk.resource.bundle pcmk.resource.bundle.container-docker pcmk.resource.bundle.container-docker.promoted-max pcmk.resource.bundle.container-podman pcmk.resource.bundle.container-podman.promoted-max pcmk.resource.bundle.container-rkt pcmk.resource.bundle.container-rkt.promoted-max pcmk.resource.bundle.reset pcmk.resource.bundle.wait pcmk.resource.cleanup pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.clone.custom-id pcmk.resource.clone.meta-in-create pcmk.resource.clone.wait pcmk.resource.config.output-formats pcmk.resource.create pcmk.resource.create.clone.custom-id pcmk.resource.create.group.future pcmk.resource.create.in-existing-bundle pcmk.resource.create.meta pcmk.resource.create.meta.future pcmk.resource.create.no-master pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.debug pcmk.resource.delete pcmk.resource.disable.safe pcmk.resource.disable.safe.brief pcmk.resource.disable.safe.tag pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.tag pcmk.resource.enable-disable.wait pcmk.resource.failcount pcmk.resource.group pcmk.resource.group.add-remove-list pcmk.resource.group.wait pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.move.autoclean.default pcmk.resource.promotable pcmk.resource.promotable.custom-id pcmk.resource.promotable.meta-in-create pcmk.resource.promotable.wait pcmk.resource.refresh pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.relations pcmk.resource.relocate pcmk.resource.restart pcmk.resource.update pcmk.resource.update-meta pcmk.resource.update-meta.list pcmk.resource.update-meta.wait pcmk.resource.update-operations pcmk.resource.update-operations.onfail-demote pcmk.resource.update.meta pcmk.resource.update.operations pcmk.resource.update.operations.onfail-demote pcmk.resource.update.wait pcmk.resource.utilization pcmk.resource.utilization-set-list-for-resource pcmk.stonith.cleanup pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta pcmk.stonith.create.operations 
pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.history.cleanup pcmk.stonith.history.show pcmk.stonith.history.update pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.update pcmk.stonith.update.scsi-devices pcmk.stonith.update.scsi-devices.add-remove pcmk.stonith.update.scsi-devices.mpath pcmk.tag pcmk.tag.resources pcs.auth.client pcs.auth.client.cluster pcs.auth.client.token pcs.auth.deauth-client pcs.auth.deauth-server pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server.token pcs.cfg-in-file.cib pcs.daemon-ssl-cert.set pcs.daemon-ssl-cert.sync-to-local-cluster pcs.disaster-recovery.essentials pcs.reports.severity.deprecation pcs.request-timeout resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 resource-agents.ocf.version-1-1 resource-agents.self-validation sbd sbd.option-timeout-action sbd.shared-block-device status.corosync.membership status.pcmk.query.resource status.pcmk.resources.hide-inactive status.pcmk.resources.id status.pcmk.resources.node status.pcmk.resources.orphaned status.pcmk.wait status.pcmk.xml stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation TASK [fedora.linux_system_roles.ha_cluster : Parse pcs capabilities] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:148 Monday 18 November 2024 10:09:14 -0500 (0:00:01.412) 0:00:11.664 ******* ok: [managed-node1] => { "ansible_facts": { "__ha_cluster_pcs_capabilities": [ "booth", "cluster.config.backup-local", "cluster.config.restore-cluster", "cluster.config.restore-local", "cluster.config.uuid", "cluster.create", "cluster.create.enable", "cluster.create.local", "cluster.create.no-keys-sync", "cluster.create.separated-name-and-address", "cluster.create.start", "cluster.create.start.wait", "cluster.create.transport.knet", "cluster.create.transport.udp-udpu", "cluster.create.transport.udp-udpu.no-rrp", "cluster.destroy", "cluster.destroy.all", "cluster.report", "cluster.verify", "corosync.authkey.update", "corosync.config.get", "corosync.config.get.struct", "corosync.config.reload", "corosync.config.sync-to-local-cluster", "corosync.config.update", "corosync.link.add", "corosync.link.remove", "corosync.link.remove.list", "corosync.link.update", "corosync.qdevice", "corosync.qdevice.model.net", "corosync.quorum", "corosync.quorum.device", "corosync.quorum.device.client.model.net.certificates.local", "corosync.quorum.device.heuristics", "corosync.quorum.device.model.net", "corosync.quorum.device.model.net.options_tls_and_kaptb", "corosync.quorum.set-expected-votes-runtime", "corosync.quorum.status", "corosync.quorum.unblock", "corosync.totem.block_unlisted_ips", "corosync.uidgid", "node.add", "node.add.enable", "node.add.separated-name-and-address", "node.add.start", "node.add.start.wait", "node.attributes", "node.attributes.set-list-for-node", "node.confirm-off", "node.fence", "node.guest", "node.kill", "node.maintenance", "node.maintenance.all", 
"node.maintenance.list", "node.maintenance.wait", "node.remote", "node.remote.onfail-demote", "node.remove", "node.remove-from-caches", "node.remove.list", "node.standby", "node.standby.all", "node.standby.list", "node.standby.wait", "node.start-stop-enable-disable", "node.start-stop-enable-disable.all", "node.start-stop-enable-disable.list", "node.start-stop-enable-disable.start-wait", "node.utilization", "node.utilization.set-list-for-node", "pcmk.acl.enable-disable", "pcmk.acl.group", "pcmk.acl.role", "pcmk.acl.role.create-with-permissions", "pcmk.acl.role.delete-with-users-groups", "pcmk.acl.user", "pcmk.alert", "pcmk.cib.checkpoints", "pcmk.cib.checkpoints.diff", "pcmk.cib.edit", "pcmk.cib.get", "pcmk.cib.get.scope", "pcmk.cib.roles.promoted-unpromoted", "pcmk.cib.set", "pcmk.constraint.colocation.set", "pcmk.constraint.colocation.set.options", "pcmk.constraint.colocation.simple", "pcmk.constraint.colocation.simple.options", "pcmk.constraint.config.output-formats", "pcmk.constraint.hide-expired", "pcmk.constraint.location.simple", "pcmk.constraint.location.simple.options", "pcmk.constraint.location.simple.resource-regexp", "pcmk.constraint.location.simple.rule", "pcmk.constraint.location.simple.rule.node-attr-type-number", "pcmk.constraint.location.simple.rule.options", "pcmk.constraint.location.simple.rule.rule-add-remove", "pcmk.constraint.no-autocorrect", "pcmk.constraint.order.set", "pcmk.constraint.order.set.options", "pcmk.constraint.order.simple", "pcmk.constraint.order.simple.options", "pcmk.constraint.ticket.set", "pcmk.constraint.ticket.set.options", "pcmk.constraint.ticket.simple", "pcmk.constraint.ticket.simple.constraint-id", "pcmk.properties.cluster", "pcmk.properties.cluster.config.output-formats", "pcmk.properties.cluster.defaults", "pcmk.properties.cluster.describe", "pcmk.properties.cluster.describe.output-formats", "pcmk.properties.cluster.set_properties.iso8601duration", "pcmk.properties.operation-defaults", "pcmk.properties.operation-defaults.config.output-formats", "pcmk.properties.operation-defaults.multiple", "pcmk.properties.operation-defaults.rule", "pcmk.properties.operation-defaults.rule-rsc-op", "pcmk.properties.operation-defaults.rule.hide-expired", "pcmk.properties.operation-defaults.rule.node-attr-type-number", "pcmk.properties.resource-defaults", "pcmk.properties.resource-defaults.config.output-formats", "pcmk.properties.resource-defaults.multiple", "pcmk.properties.resource-defaults.rule", "pcmk.properties.resource-defaults.rule-rsc-op", "pcmk.properties.resource-defaults.rule.hide-expired", "pcmk.properties.resource-defaults.rule.node-attr-type-number", "pcmk.resource.ban-move-clear", "pcmk.resource.ban-move-clear.bundles", "pcmk.resource.ban-move-clear.clear-expired", "pcmk.resource.ban-move-clear.clone", "pcmk.resource.bundle", "pcmk.resource.bundle.container-docker", "pcmk.resource.bundle.container-docker.promoted-max", "pcmk.resource.bundle.container-podman", "pcmk.resource.bundle.container-podman.promoted-max", "pcmk.resource.bundle.container-rkt", "pcmk.resource.bundle.container-rkt.promoted-max", "pcmk.resource.bundle.reset", "pcmk.resource.bundle.wait", "pcmk.resource.cleanup", "pcmk.resource.cleanup.one-resource", "pcmk.resource.cleanup.strict", "pcmk.resource.clone", "pcmk.resource.clone.custom-id", "pcmk.resource.clone.meta-in-create", "pcmk.resource.clone.wait", "pcmk.resource.config.output-formats", "pcmk.resource.create", "pcmk.resource.create.clone.custom-id", "pcmk.resource.create.group.future", 
"pcmk.resource.create.in-existing-bundle", "pcmk.resource.create.meta", "pcmk.resource.create.meta.future", "pcmk.resource.create.no-master", "pcmk.resource.create.operations", "pcmk.resource.create.operations.onfail-demote", "pcmk.resource.create.promotable", "pcmk.resource.create.promotable.custom-id", "pcmk.resource.create.wait", "pcmk.resource.debug", "pcmk.resource.delete", "pcmk.resource.disable.safe", "pcmk.resource.disable.safe.brief", "pcmk.resource.disable.safe.tag", "pcmk.resource.disable.simulate", "pcmk.resource.disable.simulate.brief", "pcmk.resource.disable.simulate.tag", "pcmk.resource.enable-disable", "pcmk.resource.enable-disable.list", "pcmk.resource.enable-disable.tag", "pcmk.resource.enable-disable.wait", "pcmk.resource.failcount", "pcmk.resource.group", "pcmk.resource.group.add-remove-list", "pcmk.resource.group.wait", "pcmk.resource.manage-unmanage", "pcmk.resource.manage-unmanage.list", "pcmk.resource.manage-unmanage.tag", "pcmk.resource.manage-unmanage.with-monitor", "pcmk.resource.move.autoclean", "pcmk.resource.move.autoclean.default", "pcmk.resource.promotable", "pcmk.resource.promotable.custom-id", "pcmk.resource.promotable.meta-in-create", "pcmk.resource.promotable.wait", "pcmk.resource.refresh", "pcmk.resource.refresh.one-resource", "pcmk.resource.refresh.strict", "pcmk.resource.relations", "pcmk.resource.relocate", "pcmk.resource.restart", "pcmk.resource.update", "pcmk.resource.update-meta", "pcmk.resource.update-meta.list", "pcmk.resource.update-meta.wait", "pcmk.resource.update-operations", "pcmk.resource.update-operations.onfail-demote", "pcmk.resource.update.meta", "pcmk.resource.update.operations", "pcmk.resource.update.operations.onfail-demote", "pcmk.resource.update.wait", "pcmk.resource.utilization", "pcmk.resource.utilization-set-list-for-resource", "pcmk.stonith.cleanup", "pcmk.stonith.cleanup.one-resource", "pcmk.stonith.cleanup.strict", "pcmk.stonith.create", "pcmk.stonith.create.in-group", "pcmk.stonith.create.meta", "pcmk.stonith.create.operations", "pcmk.stonith.create.operations.onfail-demote", "pcmk.stonith.create.wait", "pcmk.stonith.delete", "pcmk.stonith.enable-disable", "pcmk.stonith.enable-disable.list", "pcmk.stonith.enable-disable.wait", "pcmk.stonith.history.cleanup", "pcmk.stonith.history.show", "pcmk.stonith.history.update", "pcmk.stonith.levels", "pcmk.stonith.levels.add-remove-devices-list", "pcmk.stonith.levels.clear", "pcmk.stonith.levels.node-attr", "pcmk.stonith.levels.node-regexp", "pcmk.stonith.levels.verify", "pcmk.stonith.refresh", "pcmk.stonith.refresh.one-resource", "pcmk.stonith.refresh.strict", "pcmk.stonith.update", "pcmk.stonith.update.scsi-devices", "pcmk.stonith.update.scsi-devices.add-remove", "pcmk.stonith.update.scsi-devices.mpath", "pcmk.tag", "pcmk.tag.resources", "pcs.auth.client", "pcs.auth.client.cluster", "pcs.auth.client.token", "pcs.auth.deauth-client", "pcs.auth.deauth-server", "pcs.auth.no-bidirectional", "pcs.auth.separated-name-and-address", "pcs.auth.server.token", "pcs.cfg-in-file.cib", "pcs.daemon-ssl-cert.set", "pcs.daemon-ssl-cert.sync-to-local-cluster", "pcs.disaster-recovery.essentials", "pcs.reports.severity.deprecation", "pcs.request-timeout", "resource-agents.describe", "resource-agents.list", "resource-agents.list.detailed", "resource-agents.ocf.version-1-0", "resource-agents.ocf.version-1-1", "resource-agents.self-validation", "sbd", "sbd.option-timeout-action", "sbd.shared-block-device", "status.corosync.membership", "status.pcmk.query.resource", "status.pcmk.resources.hide-inactive", 
"status.pcmk.resources.id", "status.pcmk.resources.node", "status.pcmk.resources.orphaned", "status.pcmk.wait", "status.pcmk.xml", "stonith-agents.describe", "stonith-agents.list", "stonith-agents.list.detailed", "stonith-agents.ocf.version-1-0", "stonith-agents.ocf.version-1-1", "stonith-agents.self-validation" ], "__ha_cluster_pcsd_capabilities_available": true }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fetch pcsd capabilities] ********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:155 Monday 18 November 2024 10:09:14 -0500 (0:00:00.041) 0:00:11.706 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "pcsd", "--version", "--full" ], "delta": "0:00:00.952244", "end": "2024-11-18 10:09:15.710370", "rc": 0, "start": "2024-11-18 10:09:14.758126" } STDOUT: 0.11.8 booth.get-config booth.set-config booth.set-config.multiple cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all corosync.config.get corosync.config.set corosync.qdevice.model.net.certificates corosync.quorum.device.client corosync.quorum.device.client.model.net.certificates corosync.quorum.device.client.model.net.certificates.local corosync.quorum.device.client.model.net.certificates.rest-api.v2 corosync.quorum.status corosync.totem.block_unlisted_ips node.add node.add.enable node.add.list node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remove node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.stop-component node.utilization pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.role.delete-with-users-groups-implicit pcmk.acl.user pcmk.alert pcmk.cib.get pcmk.cib.remove_elements.constraints pcmk.cib.remove_elements.rest-api.v2 pcmk.cib.roles.promoted-unpromoted pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.config.output-formats pcmk.constraint.location.simple pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster pcmk.properties.cluster.describe pcmk.properties.cluster.describe.rest-api.v2 pcmk.properties.cluster.get_properties.rest-api.v2 pcmk.properties.cluster.set_properties.iso8601duration pcmk.properties.cluster.set_properties.rest-api.v2 pcmk.properties.operation-defaults.config.rest-api.v2 pcmk.properties.resource-defaults.config.rest-api.v2 pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.bundles pcmk.resource.ban-move-clear.clear-expired pcmk.resource.ban-move-clear.clone pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.create 
pcmk.resource.create.clone.custom-id pcmk.resource.create.meta pcmk.resource.create.no-master pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.delete pcmk.resource.delete.list pcmk.resource.disable.safe pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.wait pcmk.resource.group pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.promotable pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.update pcmk.resource.update-meta pcmk.resource.utilization pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta pcmk.stonith.create.operations pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.delete.list pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.scsi-unfence-node-mpath pcmk.stonith.scsi-unfence-node-v2 pcmk.stonith.update pcs.auth.export-cluster-known-hosts pcs.auth.known-host-change pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server pcs.automatic-pcs-configs-sync pcs.daemon-ssl-cert.set pcs.permissions pcs.rest-api.v1.1 pcs.rest-api.v2 resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 resource-agents.ocf.version-1-1 resource-agents.self-validation resource-agents.simplified-query-api sbd sbd-node sbd-node.shared-block-device sbd.option-timeout-action status.pcmk.local-node status.pcmk.resources.rest-api.v2 status.pcmk.wait stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation TASK [fedora.linux_system_roles.ha_cluster : Parse pcsd capabilities] ********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:163 Monday 18 November 2024 10:09:15 -0500 (0:00:01.384) 0:00:13.091 ******* ok: [managed-node1] => { "ansible_facts": { "__ha_cluster_pcsd_capabilities": [ "booth.get-config", "booth.set-config", "booth.set-config.multiple", "cluster.config.restore-local", "cluster.config.uuid", "cluster.create", "cluster.create.enable", "cluster.create.no-keys-sync", "cluster.create.separated-name-and-address", "cluster.create.start", "cluster.create.start.wait", "cluster.create.transport.knet", "cluster.create.transport.udp-udpu", "cluster.create.transport.udp-udpu.no-rrp", "cluster.destroy", "cluster.destroy.all", "corosync.config.get", "corosync.config.set", "corosync.qdevice.model.net.certificates", "corosync.quorum.device.client", "corosync.quorum.device.client.model.net.certificates", "corosync.quorum.device.client.model.net.certificates.local", "corosync.quorum.device.client.model.net.certificates.rest-api.v2", "corosync.quorum.status", 
"corosync.totem.block_unlisted_ips", "node.add", "node.add.enable", "node.add.list", "node.add.separated-name-and-address", "node.add.start", "node.add.start.wait", "node.attributes", "node.maintenance", "node.maintenance.all", "node.maintenance.list", "node.maintenance.wait", "node.remove", "node.remove-from-caches", "node.remove.list", "node.standby", "node.standby.all", "node.standby.list", "node.standby.wait", "node.start-stop-enable-disable", "node.start-stop-enable-disable.all", "node.start-stop-enable-disable.stop-component", "node.utilization", "pcmk.acl.group", "pcmk.acl.role", "pcmk.acl.role.create-with-permissions", "pcmk.acl.role.delete-with-users-groups", "pcmk.acl.role.delete-with-users-groups-implicit", "pcmk.acl.user", "pcmk.alert", "pcmk.cib.get", "pcmk.cib.remove_elements.constraints", "pcmk.cib.remove_elements.rest-api.v2", "pcmk.cib.roles.promoted-unpromoted", "pcmk.constraint.colocation.set", "pcmk.constraint.colocation.set.options", "pcmk.constraint.colocation.simple", "pcmk.constraint.config.output-formats", "pcmk.constraint.location.simple", "pcmk.constraint.location.simple.rule", "pcmk.constraint.location.simple.rule.node-attr-type-number", "pcmk.constraint.no-autocorrect", "pcmk.constraint.order.set", "pcmk.constraint.order.set.options", "pcmk.constraint.order.simple", "pcmk.constraint.ticket.set", "pcmk.constraint.ticket.set.options", "pcmk.constraint.ticket.simple", "pcmk.constraint.ticket.simple.constraint-id", "pcmk.properties.cluster", "pcmk.properties.cluster.describe", "pcmk.properties.cluster.describe.rest-api.v2", "pcmk.properties.cluster.get_properties.rest-api.v2", "pcmk.properties.cluster.set_properties.iso8601duration", "pcmk.properties.cluster.set_properties.rest-api.v2", "pcmk.properties.operation-defaults.config.rest-api.v2", "pcmk.properties.resource-defaults.config.rest-api.v2", "pcmk.resource.ban-move-clear", "pcmk.resource.ban-move-clear.bundles", "pcmk.resource.ban-move-clear.clear-expired", "pcmk.resource.ban-move-clear.clone", "pcmk.resource.cleanup.one-resource", "pcmk.resource.cleanup.strict", "pcmk.resource.clone", "pcmk.resource.create", "pcmk.resource.create.clone.custom-id", "pcmk.resource.create.meta", "pcmk.resource.create.no-master", "pcmk.resource.create.operations", "pcmk.resource.create.operations.onfail-demote", "pcmk.resource.create.promotable", "pcmk.resource.create.promotable.custom-id", "pcmk.resource.create.wait", "pcmk.resource.delete", "pcmk.resource.delete.list", "pcmk.resource.disable.safe", "pcmk.resource.disable.simulate", "pcmk.resource.disable.simulate.brief", "pcmk.resource.disable.simulate.tag", "pcmk.resource.enable-disable", "pcmk.resource.enable-disable.list", "pcmk.resource.enable-disable.wait", "pcmk.resource.group", "pcmk.resource.manage-unmanage", "pcmk.resource.manage-unmanage.list", "pcmk.resource.manage-unmanage.tag", "pcmk.resource.manage-unmanage.with-monitor", "pcmk.resource.move.autoclean", "pcmk.resource.promotable", "pcmk.resource.refresh.one-resource", "pcmk.resource.refresh.strict", "pcmk.resource.update", "pcmk.resource.update-meta", "pcmk.resource.utilization", "pcmk.stonith.cleanup.one-resource", "pcmk.stonith.cleanup.strict", "pcmk.stonith.create", "pcmk.stonith.create.in-group", "pcmk.stonith.create.meta", "pcmk.stonith.create.operations", "pcmk.stonith.create.operations.onfail-demote", "pcmk.stonith.create.wait", "pcmk.stonith.delete", "pcmk.stonith.delete.list", "pcmk.stonith.enable-disable", "pcmk.stonith.enable-disable.list", "pcmk.stonith.enable-disable.wait", "pcmk.stonith.levels", 
"pcmk.stonith.levels.add-remove-devices-list", "pcmk.stonith.levels.clear", "pcmk.stonith.levels.node-attr", "pcmk.stonith.levels.node-regexp", "pcmk.stonith.levels.verify", "pcmk.stonith.refresh.one-resource", "pcmk.stonith.refresh.strict", "pcmk.stonith.scsi-unfence-node-mpath", "pcmk.stonith.scsi-unfence-node-v2", "pcmk.stonith.update", "pcs.auth.export-cluster-known-hosts", "pcs.auth.known-host-change", "pcs.auth.no-bidirectional", "pcs.auth.separated-name-and-address", "pcs.auth.server", "pcs.automatic-pcs-configs-sync", "pcs.daemon-ssl-cert.set", "pcs.permissions", "pcs.rest-api.v1.1", "pcs.rest-api.v2", "resource-agents.describe", "resource-agents.list", "resource-agents.list.detailed", "resource-agents.ocf.version-1-0", "resource-agents.ocf.version-1-1", "resource-agents.self-validation", "resource-agents.simplified-query-api", "sbd", "sbd-node", "sbd-node.shared-block-device", "sbd.option-timeout-action", "status.pcmk.local-node", "status.pcmk.resources.rest-api.v2", "status.pcmk.wait", "stonith-agents.describe", "stonith-agents.list", "stonith-agents.list.detailed", "stonith-agents.ocf.version-1-0", "stonith-agents.ocf.version-1-1", "stonith-agents.self-validation" ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if pcs is to old to configure resources and operations defaults] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:172 Monday 18 November 2024 10:09:15 -0500 (0:00:00.051) 0:00:13.142 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "( ha_cluster_resource_defaults and not 'pcmk.properties.resource-defaults.multiple' in __ha_cluster_pcs_capabilities ) or ( ha_cluster_resource_operation_defaults and not 'pcmk.properties.operation-defaults.multiple' in __ha_cluster_pcs_capabilities )", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Generate a password hash] ********* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:26 Monday 18 November 2024 10:09:15 -0500 (0:00:00.019) 0:00:13.162 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Set hacluster password] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:38 Monday 18 November 2024 10:09:16 -0500 (0:00:00.543) 0:00:13.706 ******* ok: [managed-node1] => { "append": false, "changed": false, "comment": "cluster user", "group": 189, "home": "/var/lib/pacemaker", "move_home": false, "name": "hacluster", "password": "NOT_LOGGING_PASSWORD", "shell": "/sbin/nologin", "state": "present", "uid": 189 } TASK [fedora.linux_system_roles.ha_cluster : Configure shell] ****************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:43 Monday 18 November 2024 10:09:17 -0500 (0:00:00.776) 0:00:14.483 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml for managed-node1 TASK [fedora.linux_system_roles.ha_cluster : Stop pcsd] ************************ task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:6 Monday 18 November 2024 10:09:17 -0500 
(0:00:00.042) 0:00:14.526 ******* changed: [managed-node1] => { "changed": true, "name": "pcsd", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2024-11-18 10:07:17 EST", "ActiveEnterTimestampMonotonic": "1155537224", "ActiveExitTimestamp": "Mon 2024-11-18 10:07:13 EST", "ActiveExitTimestampMonotonic": "1151695282", "ActiveState": "active", "After": "network-online.target system.slice sysinit.target pcsd-ruby.service systemd-journald.socket basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2024-11-18 10:07:16 EST", "AssertTimestampMonotonic": "1154309482", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "18583304000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2024-11-18 10:07:16 EST", "ConditionTimestampMonotonic": "1154309477", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlGroup": "/system.slice/pcsd.service", "ControlGroupId": "8250", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "\"man:pcsd(8)\" \"man:pcs(8)\"", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "66550", "ExecMainStartTimestamp": "Mon 2024-11-18 10:07:16 EST", "ExecMainStartTimestampMonotonic": "1154316270", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", 
"FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2024-11-18 10:07:13 EST", "InactiveEnterTimestampMonotonic": "1151829829", "InactiveExitTimestamp": "Mon 2024-11-18 10:07:16 EST", "InactiveExitTimestampMonotonic": "1154316866", "InvocationID": "a4473970d3d0468a80f8c437668e0d12", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14750", "LimitNPROCSoft": "14750", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14750", "LimitSIGPENDINGSoft": "14750", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "66550", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "410193920", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", 
"ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "network-online.target system.slice sysinit.target pcsd-ruby.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "18446744073709551615", "StartupMemoryLow": "0", "StartupMemoryMax": "18446744073709551615", "StartupMemorySwapMax": "18446744073709551615", "StartupMemoryZSwapMax": "18446744073709551615", "StateChangeTimestamp": "Mon 2024-11-18 10:07:17 EST", "StateChangeTimestampMonotonic": "1155537224", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "33", "TasksMax": "4425", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.ha_cluster : Regenerate pcsd TLS certificate and key] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:11 Monday 18 November 2024 10:09:18 -0500 (0:00:01.229) 0:00:15.756 ******* skipping: [managed-node1] => (item=/var/lib/pcsd/pcsd.key) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.key", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=/var/lib/pcsd/pcsd.crt) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.crt", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Get the stat of /var/lib/pcsd] **** task path: 
/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:25 Monday 18 November 2024 10:09:18 -0500 (0:00:00.028) 0:00:15.784 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Allow certmonger to write into pcsd's certificate directory] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:30 Monday 18 November 2024 10:09:18 -0500 (0:00:00.051) 0:00:15.836 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [Ensure the name of ha_cluster_pcsd_certificates is /var/lib/pcsd/pcsd; Create certificates using the certificate role] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:37 Monday 18 November 2024 10:09:18 -0500 (0:00:00.064) 0:00:15.900 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set pcsd's certificate directory back to cluster_var_lib_t] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:49 Monday 18 November 2024 10:09:18 -0500 (0:00:00.084) 0:00:15.985 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS private key] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:64 Monday 18 November 2024 10:09:18 -0500 (0:00:00.064) 0:00:16.049 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS certificate] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:71 Monday 18 November 2024 10:09:18 -0500 (0:00:00.066) 0:00:16.116 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf] ***** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:79 Monday 18 November 2024 10:09:18 -0500 (0:00:00.073) 0:00:16.189 ******* changed: [managed-node1] => { "changed": true, "checksum": "b504e1b9c9aa23803dd6f95e66c757088b08551d", "dest": "/var/lib/pcsd/pcs_settings.conf", "gid": 0, "group": "root", "md5sum": "087ff556d850518c8fff5ad1179d8817", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cluster_var_lib_t:s0", "size": 359, "src": "/root/.ansible/tmp/ansible-tmp-1731942558.9224844-30992-221715345681206/.source.conf", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot] 
*** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88 Monday 18 November 2024 10:09:19 -0500 (0:00:00.997) 0:00:17.187 ******* changed: [managed-node1] => { "changed": true, "enabled": true, "name": "pcsd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2024-11-18 10:07:17 EST", "ActiveEnterTimestampMonotonic": "1155537224", "ActiveExitTimestamp": "Mon 2024-11-18 10:09:18 EST", "ActiveExitTimestampMonotonic": "1276252129", "ActiveState": "inactive", "After": "network-online.target system.slice sysinit.target pcsd-ruby.service systemd-journald.socket basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2024-11-18 10:07:16 EST", "AssertTimestampMonotonic": "1154309482", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "18745569000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2024-11-18 10:07:16 EST", "ConditionTimestampMonotonic": "1154309477", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlGroupId": "8250", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "\"man:pcsd(8)\" \"man:pcs(8)\"", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "1", "ExecMainExitTimestamp": "Mon 2024-11-18 10:09:18 EST", "ExecMainExitTimestampMonotonic": "1276381332", "ExecMainPID": "66550", "ExecMainStartTimestamp": "Mon 2024-11-18 10:07:16 EST", "ExecMainStartTimestampMonotonic": "1154316270", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[Mon 2024-11-18 10:07:16 EST] ; stop_time=[Mon 2024-11-18 10:09:18 EST] ; pid=66550 ; code=exited ; status=0 }", "ExecStartEx": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; flags= ; start_time=[Mon 2024-11-18 10:07:16 EST] ; stop_time=[Mon 2024-11-18 10:09:18 EST] ; pid=66550 ; code=exited ; status=0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2024-11-18 10:09:18 EST", "InactiveEnterTimestampMonotonic": "1276384485", "InactiveExitTimestamp": "Mon 2024-11-18 10:07:16 EST", "InactiveExitTimestampMonotonic": "1154316866", "InvocationID": "a4473970d3d0468a80f8c437668e0d12", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14750", "LimitNPROCSoft": "14750", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14750", "LimitSIGPENDINGSoft": "14750", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", 
"PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "network-online.target system.slice sysinit.target pcsd-ruby.service", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "18446744073709551615", "StartupMemoryLow": "0", "StartupMemoryMax": "18446744073709551615", "StartupMemorySwapMax": "18446744073709551615", "StartupMemoryZSwapMax": "18446744073709551615", "StateChangeTimestamp": "Mon 2024-11-18 10:09:18 EST", "StateChangeTimestampMonotonic": "1276384485", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4425", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.ha_cluster : Configure firewall] *************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:49 Monday 18 November 2024 10:09:22 -0500 (0:00:02.475) 0:00:19.663 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml for managed-node1 TASK [Ensure the service and the ports status with the firewall role] ********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml:3 Monday 18 November 2024 10:09:22 -0500 
(0:00:00.054) 0:00:19.717 ******* included: fedora.linux_system_roles.firewall for managed-node1 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Monday 18 November 2024 10:09:22 -0500 (0:00:00.082) 0:00:19.800 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Monday 18 November 2024 10:09:22 -0500 (0:00:00.067) 0:00:19.867 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Monday 18 November 2024 10:09:22 -0500 (0:00:00.048) 0:00:19.916 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Monday 18 November 2024 10:09:23 -0500 (0:00:00.854) 0:00:20.771 ******* ok: [managed-node1] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Monday 18 November 2024 10:09:23 -0500 (0:00:00.053) 0:00:20.824 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Monday 18 November 2024 10:09:24 -0500 (0:00:00.793) 0:00:21.617 ******* ok: [managed-node1] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Monday 18 November 2024 10:09:24 -0500 (0:00:00.053) 0:00:21.671 ******* ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Monday 18 November 2024 10:09:27 -0500 (0:00:03.523) 0:00:25.195 ******* skipping: [managed-node1] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Monday 18 November 2024 10:09:27 -0500 (0:00:00.054) 0:00:25.249 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Monday 18 November 2024 10:09:27 -0500 (0:00:00.069) 0:00:25.319 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Monday 18 November 2024 10:09:28 -0500 (0:00:00.049) 0:00:25.368 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Monday 18 November 2024 10:09:28 -0500 (0:00:00.043) 0:00:25.411 ******* skipping: [managed-node1] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Monday 18 November 2024 10:09:28 -0500 (0:00:00.059) 0:00:25.471 ******* ok: [managed-node1] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2024-11-18 09:55:10 EST", "ActiveEnterTimestampMonotonic": "428502376", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket dbus-broker.service system.slice basic.target polkit.service sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2024-11-18 09:55:08 EST", "AssertTimestampMonotonic": "426884414", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "892335000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw 
cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2024-11-18 09:55:08 EST", "ConditionTimestampMonotonic": "426884410", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service ebtables.service shutdown.target iptables.service ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "5382", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "6554", "ExecMainStartTimestamp": "Mon 2024-11-18 09:55:08 EST", "ExecMainStartTimestampMonotonic": "426897141", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Mon 2024-11-18 09:55:08 EST", "InactiveExitTimestampMonotonic": "426897531", "InvocationID": "af86cb355cc84acd9c52467544be3b1d", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": 
"infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14750", "LimitNPROCSoft": "14750", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14750", "LimitSIGPENDINGSoft": "14750", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "6554", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "33726464", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": 
"yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "18446744073709551615", "StartupMemoryLow": "0", "StartupMemoryMax": "18446744073709551615", "StartupMemorySwapMax": "18446744073709551615", "StartupMemoryZSwapMax": "18446744073709551615", "StateChangeTimestamp": "Mon 2024-11-18 10:09:11 EST", "StateChangeTimestampMonotonic": "1269827430", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "4425", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Monday 18 November 2024 10:09:29 -0500 (0:00:01.178) 0:00:26.649 ******* ok: [managed-node1] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2024-11-18 09:55:10 EST", "ActiveEnterTimestampMonotonic": "428502376", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket dbus-broker.service system.slice basic.target polkit.service sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2024-11-18 09:55:08 EST", "AssertTimestampMonotonic": "426884414", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "892335000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon 
cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2024-11-18 09:55:08 EST", "ConditionTimestampMonotonic": "426884410", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service ebtables.service shutdown.target iptables.service ipset.service nftables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "5382", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "6554", "ExecMainStartTimestamp": "Mon 2024-11-18 09:55:08 EST", "ExecMainStartTimestampMonotonic": "426897141", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Mon 2024-11-18 09:55:08 EST", "InactiveExitTimestampMonotonic": "426897531", "InvocationID": "af86cb355cc84acd9c52467544be3b1d", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": 
"8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14750", "LimitNPROCSoft": "14750", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14750", "LimitSIGPENDINGSoft": "14750", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "6554", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "33726464", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket system.slice sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": 
"18446744073709551615", "StartupMemoryLow": "0", "StartupMemoryMax": "18446744073709551615", "StartupMemorySwapMax": "18446744073709551615", "StartupMemoryZSwapMax": "18446744073709551615", "StateChangeTimestamp": "Mon 2024-11-18 10:09:11 EST", "StateChangeTimestampMonotonic": "1269827430", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "4425", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Monday 18 November 2024 10:09:30 -0500 (0:00:01.203) 0:00:27.853 ******* ok: [managed-node1] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Monday 18 November 2024 10:09:30 -0500 (0:00:00.078) 0:00:27.931 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Monday 18 November 2024 10:09:30 -0500 (0:00:00.053) 0:00:27.985 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Monday 18 November 2024 10:09:30 -0500 (0:00:00.046) 0:00:28.031 ******* ok: [managed-node1] => (item={'service': 'high-availability', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "service": "high-availability", "state": "enabled" } } ok: [managed-node1] => (item={'port': '1229/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "1229/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Monday 18 November 2024 10:09:32 -0500 (0:00:01.942) 0:00:29.974 ******* skipping: [managed-node1] => (item={'service': 'high-availability', 
'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "service": "high-availability", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item={'port': '1229/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "1229/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Monday 18 November 2024 10:09:32 -0500 (0:00:00.118) 0:00:30.093 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Monday 18 November 2024 10:09:32 -0500 (0:00:00.093) 0:00:30.187 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Monday 18 November 2024 10:09:32 -0500 (0:00:00.091) 0:00:30.278 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Monday 18 November 2024 10:09:33 -0500 (0:00:00.083) 0:00:30.362 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Monday 18 November 2024 10:09:33 -0500 (0:00:00.069) 0:00:30.431 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Monday 18 November 2024 10:09:33 -0500 (0:00:00.056) 0:00:30.488 ******* skipping: [managed-node1] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [fedora.linux_system_roles.ha_cluster : Configure selinux] **************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:52 Monday 18 November 2024 10:09:33 -0500 (0:00:00.075) 0:00:30.564 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml for managed-node1 TASK [fedora.linux_system_roles.ha_cluster : Populate service facts] *********** task path: 
/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3 Monday 18 November 2024 10:09:33 -0500 (0:00:00.067) 0:00:30.631 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "canberra-system-bootup.service": { "name": "canberra-system-bootup.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown-reboot.service": { "name": "canberra-system-shutdown-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown.service": { "name": "canberra-system-shutdown.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qdevice.service": { "name": "corosync-qdevice.service", "source": "systemd", "state": "running", "status": "enabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "running", "status": "enabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "geoclue.service": { "name": "geoclue.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "low-memory-monitor.service": { "name": "low-memory-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "running", "status": "enabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": 
"rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rtkit-daemon.service": { "name": "rtkit-daemon.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sbd.service": { "name": "sbd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "sbd_remote.service": { "name": "sbd_remote.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { 
"name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "upower.service": { "name": "upower.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Set the fence-virt/fence-agents port to _ha_cluster_selinux] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:12 Monday 18 November 2024 10:09:36 -0500 (0:00:02.793) 0:00:33.424 ******* ok: [managed-node1] => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Get associated selinux ports] ***** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:20 Monday 18 November 2024 10:09:36 -0500 (0:00:00.079) 0:00:33.504 ******* ok: [managed-node1] => { "changed": false, "cmd": "set -euo pipefail\nfirewall-cmd --info-service=high-availability | 
egrep \" +ports: +\" | sed -e \"s/ *ports: //\"", "delta": "0:00:00.262186", "end": "2024-11-18 10:09:36.844750", "rc": 0, "start": "2024-11-18 10:09:36.582564" } STDOUT: 2224/tcp 3121/tcp 5403/tcp 5404/udp 5405-5412/udp 9929/tcp 9929/udp 21064/tcp STDERR: egrep: warning: egrep is obsolescent; using grep -E TASK [fedora.linux_system_roles.ha_cluster : Add the high-availability service ports to _ha_cluster_selinux] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:28 Monday 18 November 2024 10:09:36 -0500 (0:00:00.725) 0:00:34.230 ******* ok: [managed-node1] => (item=2224/tcp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "2224/tcp" } ok: [managed-node1] => (item=3121/tcp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "3121/tcp" } ok: [managed-node1] => (item=5403/tcp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5403", "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "5403/tcp" } ok: [managed-node1] => (item=5404/udp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5403", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5404", "proto": "udp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "5404/udp" } ok: [managed-node1] => (item=5405-5412/udp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5403", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5404", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5405-5412", "proto": "udp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "5405-5412/udp" } ok: [managed-node1] => (item=9929/tcp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": 
"tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5403", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5404", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5405-5412", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "9929", "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "9929/tcp" } ok: [managed-node1] => (item=9929/udp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5403", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5404", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5405-5412", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "9929", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "9929", "proto": "udp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "9929/udp" } ok: [managed-node1] => (item=21064/tcp) => { "ansible_facts": { "_ha_cluster_selinux": [ { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5403", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5404", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "5405-5412", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "9929", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "9929", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, { "local": true, "ports": "21064", "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ] }, "ansible_loop_var": "item", "changed": false, "item": "21064/tcp" } TASK [Ensure the service and the ports status with the selinux role] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:35 Monday 18 November 2024 10:09:37 -0500 (0:00:00.245) 0:00:34.475 ******* redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for managed-node1 TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Monday 18 
November 2024 10:09:37 -0500 (0:00:00.141) 0:00:34.616 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node1 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Monday 18 November 2024 10:09:37 -0500 (0:00:00.124) 0:00:34.741 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Monday 18 November 2024 10:09:37 -0500 (0:00:00.073) 0:00:34.814 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node1 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Monday 18 November 2024 10:09:37 -0500 (0:00:00.116) 0:00:34.930 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Monday 18 November 2024 10:09:38 -0500 (0:00:00.463) 0:00:35.394 ******* ok: [managed-node1] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Monday 18 November 2024 10:09:38 -0500 (0:00:00.066) 0:00:35.460 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Monday 18 November 2024 10:09:38 -0500 (0:00:00.479) 0:00:35.940 ******* ok: [managed-node1] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Monday 18 November 2024 10:09:38 -0500 (0:00:00.074) 0:00:36.014 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Monday 18 November 2024 10:09:38 -0500 (0:00:00.079) 0:00:36.094 ******* ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: 
/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Monday 18 November 2024 10:09:40 -0500 (0:00:02.078) 0:00:38.172 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Monday 18 November 2024 10:09:40 -0500 (0:00:00.062) 0:00:38.235 ******* ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Monday 18 November 2024 10:09:42 -0500 (0:00:02.076) 0:00:40.312 ******* skipping: [managed-node1] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Monday 18 November 2024 10:09:43 -0500 (0:00:00.069) 0:00:40.381 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Monday 18 November 2024 10:09:43 -0500 (0:00:00.096) 0:00:40.478 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Monday 18 November 2024 10:09:43 -0500 (0:00:00.099) 0:00:40.578 ******* ok: [managed-node1] TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Monday 18 November 2024 10:09:44 -0500 (0:00:01.032) 0:00:41.610 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Monday 18 November 2024 10:09:44 -0500 (0:00:00.058) 0:00:41.669 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Monday 18 November 2024 10:09:44 -0500 (0:00:00.060) 0:00:41.730 ******* ok: [managed-node1] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": 
false } TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] ********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25 Monday 18 November 2024 10:09:44 -0500 (0:00:00.097) 0:00:41.827 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] ********* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30 Monday 18 November 2024 10:09:44 -0500 (0:00:00.070) 0:00:41.898 ******* skipping: [managed-node1] => { "false_condition": "ansible_selinux.status == \"disabled\"" } TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ******** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35 Monday 18 November 2024 10:09:44 -0500 (0:00:00.099) 0:00:41.998 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43 Monday 18 November 2024 10:09:44 -0500 (0:00:00.061) 0:00:42.059 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48 Monday 18 November 2024 10:09:44 -0500 (0:00:00.057) 0:00:42.116 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53 Monday 18 November 2024 10:09:44 -0500 (0:00:00.056) 0:00:42.173 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58 Monday 18 November 2024 10:09:44 -0500 (0:00:00.056) 0:00:42.229 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] **************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63 Monday 18 November 2024 10:09:44 -0500 (0:00:00.059) 0:00:42.288 ******* skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74 Monday 18 November 2024 10:09:44 -0500 (0:00:00.027) 0:00:42.316 ******* skipping: [managed-node1] => { "changed": false, 
"skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ****** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 Monday 18 November 2024 10:09:45 -0500 (0:00:00.026) 0:00:42.342 ******* ok: [managed-node1] => (item={'ports': '1229', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "1229", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "1229" ], "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '2224', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "2224", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "2224" ], "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '3121', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "3121", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "3121" ], "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '5403', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "5403", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "5403" ], "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '5404', 'proto': 'udp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "5404", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "5404" ], "proto": "udp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '5405-5412', 'proto': 'udp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "5405-5412", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "5405-5412" ], "proto": "udp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '9929', 'proto': 'tcp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "9929", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "9929" ], "proto": "tcp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '9929', 'proto': 'udp', 'setype': 'cluster_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "9929", "proto": "udp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "9929" ], "proto": "udp", "setype": "cluster_port_t", "state": "present" } ok: [managed-node1] => (item={'ports': '21064', 'proto': 'tcp', 'setype': 'cluster_port_t', 
'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": "21064", "proto": "tcp", "setype": "cluster_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": false, "ports": [ "21064" ], "proto": "tcp", "setype": "cluster_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99 Monday 18 November 2024 10:09:51 -0500 (0:00:06.410) 0:00:48.753 ******* skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 Monday 18 November 2024 10:09:51 -0500 (0:00:00.026) 0:00:48.780 ******* ok: [managed-node1] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:477bfa45313dadfe226d6ab9e22295cb280f90231dcd9d667babc46c8489bb06", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:9462acd21b3d2e0576d891dd00afc97b78845f1b9ae485115203a25218c5d85a", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:e01843abc23ed1dfc43793ae85e8c9648e496c605be6a6e4d8b104b0f7282daf", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:cc69f04942026a1f97b65205f8e51e3f149fdf9441a8733d90f5956b580e9e4a", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:dbacda814496018856ee6740302a24925eef3798a3f2f825b369d2717af2abf4", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:e12d5af1828ae63eb4e51ee0d5a46527ca582bf5a6bcf1ae6336dda2dea29789", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:17249a8a8e90ffc8e44daa650b571a1838715c674fd977cd2dc74d552c946ee9", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:daee5413535acf3c3fc8b4ea29086b0a749bccc99cddcb6fc50f3138e2c5ff57", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:d41b521fce07bdede4d8b4f027884bc16872e95ac549166647af8eb85bf56e0e", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:9d434ee57dcd8d078a9529582f242ab417621bf6e75d831730cbdc1d1cffa464", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:ca265fe2b8fee3d612589c4d4ead9dc95a29973168bc132152c8eec9f82a1633", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:7ec24e04b2ef91cdb2b4067c32a3800c7de8f55b2ae28e60b6198a0374792122", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bacfbe974e4f8671578e45466f78489fb4cb893f5c1468ed9e1905e66d7a9a79", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:900678b1de5f90b6f9e1c26aeeda5d56cb1f58f1d09478204b49f9d4c38c1bd1", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:4559f5d49184ad2d355a1f8dd6b92e70ab6831d5e8098cc859c0b2661513846a", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:223516cbd857a829232274bf343ebc5a1ee227b426e6c3afbf2f8c9450916a54", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:05b45e998424472dae29c23e442f07878b6570335cd02acc689f00534a818dd4", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:da23a941cd42d1741292185410a94c72d4947497310b7cfc02d1572a79fdd58b", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:c9678391cec7fd06f40a62159750ab995a0f10acf2a5700231ae33fde29a35d2", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:59c828b4cd8dd94db511648e70581134f229f636ead76bf72bf02497ec05b740", "enabled": 1 
} }, "authlogin": { "100": { "checksum": "sha256:2d2d40b0c619b7547fc920ff549531601223c96a7abaeb15f47d941504ab0fc2", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:a78a0fec9489595c5885759df07eb3770b32019d6badb1936d6da5606dfe8e2a", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:d27d92ef5ee8d94a9dc2a4f503e5f833f677fa001a001a4bdeca2f0cc40e8440", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:8e2edc9d157d57c5c82d4b43745d3b46b69e392b9f0b772d25259dcc8aa9a9d7", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:c0adf88ffec7970ac1e9cf2d71c35dabeb6dc44018d9babe88e7e8585acd8114", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:b277699416695f07591a950f123f8b762a8a0845b5ffe52e219caf24e0436449", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:ecc5a93955f36f3ab5ec4af95e5cd14d60969574e8a832238dbe9a9cd936599f", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:bf434b49a33ecc7f4bf438a94d50269a7429954e2802b886add9c150bb0d4163", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:abaa6af6ab00e318a16f5d1f4dea08243b3ac3bf15e61e17d15eb586961f66b5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:3b844735b04f7d28bd75cac9ac1bbd4b9b7671e18122c196905689b36b1b0b8f", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:2b3b285a92dd4a80aa7b815998c2e710367ffb53785bab4880ae0c433f49d3bb", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:18d958e6ebb345902a9b0d326351172e0d5a64328c14fe2ee7a00e0f157e8b16", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:adc12de8ddd06b5810fee732f46d4665fa9d301891b3aadccc5d747158f3d753", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:e473592f28168b28716eaf83a16912deab591484685c6c26756280fb067ebffb", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:33297b8dfb0f57ffa986470158d9bb5fcfff9b7a2b765de9af35b05d54f26ea2", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:754e355cebb2c0c0c5344aaeb6ee8ab30ccec5bdf260d1d843b4b0354138d675", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:3d36490687f1e0fe4ffdfb1800910a7b2b83ee6eac835e89c7d36b2bebd8ac18", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:e17343b2c974d41292a690ff1db38e08a5a19b6652df14191998244e174d08cd", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:d9952acc35a34270425ef8f08ebabbeaf4d6593c274be4af8819e4fb7b9a8e3b", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:764651f7e234fa057201acc7fa7b42b24f16aa766b3ae1d9413adfca7cd27607", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:35dbde07d7f28e2cae705f5a9efa7343b8ed0a2a3700f970a1b977fae304ae13", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:bb8be39bd5152188630ac4a29597149118eb2685a1ae200fe736dfd0691ec2bc", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:7df5db3e4742d902d906502779b50e67b10f1c8811d83907e3623df765a8c660", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:94f000c34cdd93051e7c9f13870cd586cd1894ee02e66ca01495ab5ccf9831e5", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:57b88e2cda1fc5b6286483cb8ffb9932cc7e1b09f8956aeb4d7a564ef315e9ae", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:f143f674cf5691c3cddb930afbc750db60fcda8e432715cb6113a29af97a4b26", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:aa90403e9af7721533766d57e87d522ebeb4b4313bf1620bf4ef7484550143cb", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:16408cd918f6f2446103168f9cb17f1075426bac99a95b3e54cdd04131f0fdbc", "enabled": 
1 } }, "certmonger": { "100": { "checksum": "sha256:3dcb55c090bc4483a06c5be808d4cc09cbee4d86197193f84a8e80ee5b6c8f59", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:5220e64fe7413e2abc7767d50a7bace47411ae14c468804bc3ff9618e8a48b7c", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:bbd394d2d7712f3d29026b7d52bc514bbd717c23d56d51a28efc2bc0386aa68a", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:f359e08980417cfb62d7bf3bae0dd6ae8ad8ebb7c4608ca966f66a23f77fdf16", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:fe3361233eb462c8e911b3e9f32d2a1b22ef32879de33e88c1d09d8335119b2d", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:3822faf2dd9f0260ec4900d822c4912f64def93ad90afb1d1f242d6f1da8fea0", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:6bd80b86af776137eca26d36d54a7e8c71319a9cfe16b3475cce3aaee7b18260", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:8bb0aa263f05b2c52186f45ff7e01c74a192f08084d2242f3e02056d22b63347", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:75ce19bb5df84a28397e5095ec6cebf41443625c5b9bc01670bf097b2c74b07b", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:9d2c3931d997bcfc3ded32472ab56ea6555ed13106ba716a1b4eebd66e0c855b", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ed241a5db82c399f5c915bbe316c8a6eb127f7db6b6a5e18289f0c76bd06e19d", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:3fa7ee262668f5b608beeaae2fd91ad102101888296eab606ccfbd6372e9bf9e", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:aad1cf38e77cb63b32bb4c5eee49c8c744f5e9108da49b7d19e2c229dbb19f7e", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:46da1bda7d0dbf00a4b868507aeae79e3dc2996413d7563dfc4a18e058d76390", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:776b464c4960b29f3e073f0c54724022e05cc9751715ca0663b76395c9782b6f", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:c9fad41acb4719b29396f62ca49d76a5b3837053915f61507226806072ecb531", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:3a3a311b67eee551f083a42218f7e10b6768cb7468936abd56fff815a3a9d195", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:b2f607bb92bb745a9da41564625c01a43f767941dc722cd23d1f0f010f37ad22", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:be2679199d2380a2c74b77526f87224807a99e852b24a2720fe9c4ef80f4579a", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:6473a45829b3a3d2b58d805a8fc780c18146540a3c97f10c2a4534ea8754bf8d", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:2f3242dbd5e57c3d3c99bf06c071ea5c9a1f317c622021a65db25fc0557872aa", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:9d0ceec2815e77a2f6bd06b743b9967242c232989498af3b64bd4555cc1f9f78", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:43aa166f0fe88004ddef4d575a982fb6bb593859428f09d3320e31483d8d13b6", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:7297e25da06db9d2d71049e1b3b32c9c955584669904eb8be765da58a3d01aaf", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:2106974ab439f07b4ef19fb65ff8d6fc5cfa5d66e7729e43967997f327dfd2ab", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:be682ee472b02451450421c0119c03c9b447c0b9eb4ad5dbb9bd10b0de4b9acb", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:37352a9b82e50f5be2d50629154ae995e0cc011ee6f1969678f26b4ab80a7e08", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:6b2981d7b18884b33c88c29c6496ebfda453f40f4cc88becd4f109be5a4e8ee6", 
"enabled": 1 } }, "cups": { "100": { "checksum": "sha256:3a173918265c1cd4e54838bdd1898cfb5c605d58794b4e00a333c9c96eb16123", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:25356da928f62a431eb2bda1ad1d74075867a0eb573826be4cef51a706e0a746", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:ab96a81b6eb6799b034d863f01e633c8c748c6b712b2ddba4ec931d779eb3b4e", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:f14393aa4957db71ab968a0c637baf624fb47dda7dc4e5e3dfd1801eccdf5e9d", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:7e6e7bccbb4f3da0393e07351d1e440dbdc58b33336655a4a7137c59779dd44b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f41930572c3bb696f4be26985967bf69fc1e6f657335f26f96f92bfbf8363ba6", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:ecc807bcb8fb70e62c11ba460aea07e786a854f7c2fd571588ff1ea3b6f946b6", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:ce70c6c037ce6074103b0faa55231f16bb4aa5eea1f3f9efe92100d9cd582b76", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:3a1d9f6c7b818da65a78aabdc1532e0bf3b66bf1f92caa46ee0780d9a7c26eaf", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:03301506d270bd080922da624eb3640f59291c6e27c6de25ef4e88f151599f00", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:adeab7af563c5294ed1569fcfb4a7f6915231440bee869ea8a0e50b0de6754b7", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:8f17d96d908f287f10794b4f055eb94b55ee91c9158272b1ed35f591709c858b", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:9f560340ee73e6dc4dac64900b3fb6967650a4380f697611451806b651dcd435", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:2c7a5005d072831bfa8f094afb09e23917d4b6cf0a89ddce1a956bf2d83ecab7", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:066b8a49cc368b2fc52e944ea04ea440828bdcb2557381e282f43ee31c05e34a", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:c00c321e5bc4e96a46ef7e4f06f8c8838dc20e2e052f86f1ec556ad87ccad1cc", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:d8a9dd69b4c1eaca0f22dc3a72fb0b7f2835fca23729a49243885d74f3046133", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:ff75493b4930a9ee6cb34f72ee56c8a062943b14e73901e76b24b90402d9abf6", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:465ed9c01087dfb0b86421812a4140ad8160c871ce045453317c9c0caaabe459", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:de91812dc8eb7a02868e43ae777952d59431301a119aa6d20c4b5aac59587799", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:7df91b8713976d5269c5086c4c26f4eb9021f623f226370fcd38c5fdf19560a0", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:807e54d75e1426b53ea2b0bca7714bdf1d08afc6a566ba1370f1cac3fd1b95df", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:9b914a1cdcf2f9dcef96df9ec9b2a05cd0c8780110b6f2b6e3650efea3a5ff81", "enabled": 1 } }, "entropyd": { "100": { "checksum": "sha256:7b6c6b734fbc35d880e16304db199407918bbf35a8271252bdc81d0ca82e3270", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:1f38ddd17bafae62621f88bb7c3ff355917daf15137838899194b2cca92545cc", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:f3abb39b793d2137c46150c157db61212a60658c722c8ab8a48aff5faa6bd000", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:ab30ec06a5d9a2200212e9898a51d897802b061883241b694f593ed0ceae8e44", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:078a5bc19cb420f99d553617830855fe67c8b1d932809b19435c6e1cc3514d4e", "enabled": 1 } }, 
"fedoratp": { "100": { "checksum": "sha256:c853844595039d851838b45725c3a6dcd58e7b00b925c12b3f2636f49cae8e50", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:d968beed86ab2525e3fd926d4f5b0cecc87122106b116cf11e1b9fab9f4800b0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:1e0a07ff19c04e59b6a75d7239c6e74dc41d3f45a723db7096bd112b9c65c99b", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:7da51a39b485200ebb5ea0702ce217f60dd81399daee41a33f89473b0f3f0c9b", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:cfdab3b58c8373e1dace94d553137e52ad24fb0b58ea55ce01429d04bad2e8cc", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:b85abaeedc5417054b9828753b4feb55597e92dee3d35107e583b537b44479bc", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:9dbaa617aceb328d443166f49be58e9e7fc30b2c45e48368eb75948e56073ce7", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:0a49b595369d24fdfb46bc071c4b8b38161cb2f92b0ae230e196e7c85265f457", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:7674a550260c883191cea9607244c81b925cd1f902743673071b356fcb26e07b", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:5016b57ea8027abab1d330be745134bb6c3299b004b3464d30d75c8f28ee1e33", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:89eca0e8f8a5fd3b04eea68572e4f2ab203b1f4f65c2db6a66573ca2445b6f10", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:aaec47a4f9415fae72eb31cc3ed4062c212d6da089b95bde5a63da6d5ca5e8ff", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:c17097530cf912fe51f04543276826db12326fb6e0538646a76840dd15a27144", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:5b6a6ada4fe164616e1156688ef505501bbd7b82358ec9f33e0ba9d8cb38f533", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:b4b658505037a371eefe4517886c4e706c303e3ca6b3bf3a338968b43cca4c68", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:fdcc22033c9d77dac04be9daa7d90f7337cc42523ee9e138f8b7424a992faf95", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:8a6c115c3bf833deed3a16b8549d07839a8ef66f0c26e383012f6433981ad829", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:38507a42c1faa2b62ba7703782ed671ef9629a97fc777572fd00795faee9ce2b", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:86f14d56f417b455311180b3575135e263714659bdd9ff41f4614c655386a773", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:a3678f7d00e43fffce49c67104399412159cdcc6e678a22752f89945f50faa65", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:e34198cb7ecb485ee61efbd2e149ff32d39b2b60b98ef86f89038b4e1f9ceb9c", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:49266c8cb7c7a1065343dab49066aedd3cff809f72b5095d9345ffffedb97718", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:cb7f632479be616093f816ce0025f5e0886b768fc2ea878da024c1c7372b5185", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:02765ebb3cf9f00b76a60da906077e0b2bc2911ad00efeeb49d7908e669c4ad5", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:5a51055c6aaea2d7db8a2f963b2324edf2b86ab04404708234ae041917b7bad8", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:560baef9bf0a0f6b28fa51938195939b21d20a406be7e7bf5f4ac1130d57b0b6", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:7e30880298587da9116744b053571caba236135f9a9d57916586b7f2e1fcfb5e", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:7b3fd1932a16bd3a411d4fa46cceed73b885b71d208caad4d5f2dc9375227a64", "enabled": 1 } }, "hostname": { 
"100": { "checksum": "sha256:f3a83c0e9ed9a513443d26571f980b511155f1e9728e5cc9259103c85e8cfd21", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:8e309d508968f0e56d3878d6d13fd1c29b41c2ccc96bef44455b36fa93601864", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:399399f8d33bac29a760654a4175ac4424371cabb26f4650211f0f7154fc05b9", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:d516e84e9629201c76ed2196c8c7b8abbd2a8ae3d7a1023a1f579cde605c6cf0", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:afa3cbc6b9e5a1451918de5c62fd74b74f65b114fe5443bd7fa9be8f9ae3e210", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:43246b9a0a610f6253cd14bb4bbc7c40f20c0f33f98c987703bc92859b445ff9", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:44840920ee13d33506a8d57a598815dfff09a0939485f3fe4f77bb8432e9de1b", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:1a9fe32a678562339a1cc7db4b6bb250fbad03f418e3baba374e6b19676ad69f", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:2984c88fe19eb4293299fa6ebebcb85f77bebdc65cd058b39c6be1cc1246d748", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:26afa99fcc6380840d15b00c324d5beb7f2324d2cd60a4944eda5cc679c3bf0d", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:0ee1a484be5c543deb36551994d85b722f71c6db5278f9a243c4da17c1589cf3", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:238d3eb6487b964dde6681bb0f55ee17afacfa4a7e31194812a72d0ce41e7e96", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:709249be03e21fbe9fa6ba11ec166512ab278a32e414cb9cb6e6a51058390b0a", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:3726ddd4cf7b45bafe1e90dce8c6361b10ff51609f5c25179a534d00cc4daa9e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:941e078fd84f0e1c43885b2d66808832e2eb2b000dfb124864cacd3cb7e06b7a", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:7a12658f8254d34fdf70a9f383af69fbacc783ebee84b3e97f4c7e57ef827155", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:56feb9828f4e805da0b45b621629c397b8dd949a30d9bfe737f903b458cf0465", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:614bc9604d713acee571246bcc152147e91948465fa0466aed440bde84fd72fd", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:7b90ed26fc68651871c44a131038ff67acc956df1b4d05df8e4c0ac37ed4d88d", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:9bfdf05ecbc781ea11ffa37ecb4304463dfb7c84cba7c1f4f25331ecea4759bc", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:aa3c3cf242ea7c7c201eb8d96d1221241bd34a4c15106a7bf479281e7c768569", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:c3635542baef74852e34fd19e2f94dd54202b694f1e05f95cf5cd1d284d63ad8", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:302b9b871aa862017f4e1399d8ecc2c2ce0d2588cb5681afb30b684d06591aa0", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:34343787edda98fb2f23253625e66b8b0ff1f7bd74a714c3c8fddaf31b173c3d", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:dc82d679731976d262aa1a509580e5b00610e3207fc7177e2ee61c261c338a84", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:35149e88ce9d858a4b6f72dbef0b0ff12cfb924bcb219d0f2a89112dcf17763a", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:53a9a5f8fd0569fa5b260152ea4a8b95e1eaf2baa9a6efd807b7c912b1da5a5c", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:b8b3b748ba7c4f6efebf7d22cd76bf7912f078c98af1b25361a633f3adb6b001", "enabled": 1 } }, "kerberos": { "100": { 
"checksum": "sha256:6c9ca4332367d3929f89a2f30dfe61f2e4eb53fcb8404f14b3364260edcfafba", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:1741e270c445dd1db0913e7a1440f57a021fff237072489cd1c9c81bcb5de8c5", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:c3b8b532c41c9e9aecb4513a3f6691f84ec9b844af3e54fd36f7660e8cecc594", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:20a779f5f4ba8c6597d8978960adb5b85fb2d4b7f40de1f633cf23453e4405b1", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:413e5845169155e71910735491c74fb51b8fd17320febd0d8c2d77f3cb219229", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:afe94d2a26d9364eb99683b9421bf93d52804c4dd4d0937ab464428e11aafd01", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:003387f645689dd6f5ea9c199d8227447588f30cc3f35ebaaa788d88481a2a78", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:3ee03692ad54576d535a1500c124ede3b2593f751544f4aabb69d8e5676b0293", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:4fd25ad58276414ac338e957c8f837314da853f77ed744da68fed8d52f272eee", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:b205a12251069bbef25eaef9e1fc96c9a8cd66b240b05a6adbcbaf7d50843828", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:ca1b97c55090674029a69712fc0d3d494509ad557633906b7c1d68333d787ab0", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:0ed09c8657437aaad1cce821ae236385ed5ae0d5db00f296bae7f7f88bb6c6c1", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:78b6f622eb7b91cf75a64977006217b95dee0bbd5c6f308f58e4a55c41bc8b5c", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:098ee954fdb5f67145421eaa204666482e85f6063fbd538b4cab332a993dfd58", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:9c8fb984ca6f831a7d3fc14e39b11199bceb6a4889fe04dbcd0defd4e5175866", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:6605a9b24096abc10238b2f2c0705a24f57178565e82af92c0886369f8c16d5c", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:b60f136316316479f7e2536f0d288b9b80c7c900c5a8e27af52eafcfc758e311", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:33c6f26de4f4f34deaf1a95542bea744faacd920ba5b08a6895d224c33dc9433", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:937bf2711d14a3fcbab6c768eaf9f01110a0346831d22b0e13a5ecb32bbcfcaa", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:991ea8a8233f61129c87fd2688ddd77e8edbc262eae49904a82fb5b9928f573b", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:8e63754d229140d22db6fcfae0284eac8f981ddab25cd37657afb2a2b7cd68d7", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:767e9d4324089f410e57274dc7022cd522aa0bc9fa516ce030c77e3d6d646a5f", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:b077926f6102e28cb4863cf775ca53dc650ed363613c74decd1fdca35f497dd4", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:ded1120bad52ac1aeb8e546087e49937ab47eccc524d05dd66c5980e0557e761", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:a4708f133e0070d180ca5dcedd2132f06b6338b647dcb30c6e7a3087e465fa38", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:ca7bd26d342cd63a9c27fab82ed7f7f085fb4e6c0c749ba7808990bf11be7d35", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:f17b14e268f906b4e8e4e279a7bb4197924e7009e18511848632f787537025cd", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:4e61b468bac1a34c41e1c322b53a1be1b9b8e650ea10c39f7867b4cf268de134", "enabled": 1 } }, "mailman": { "100": { 
"checksum": "sha256:90bd08877279fac3ebe12bfd03f20d239ef2fa09ef3935b1b1918d79cababf80", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:7adef3480b6ef250d0219367851731bf37bfca02b466170446dc8ffc0c5fb2a6", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:329b3b21755d88d1c93811318fdc2bad6fbfdfbe9075ed74cffe320faff7afd7", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:0747400424bb5ed360dc7e2bbb1324570f3abdaa65331d1e4f32bb40f2d821bb", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:212fcbe07cf5f9878bfe13cefc2ff3cacb97e50eeb16c3b859a1ef2fb1e5ce85", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:7048444060b0911d7fab043943e7072bd54a22d0af670fcc89bdcdd8271468e4", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:9b6a094f2590dd2b257bb903f439dfc35b864947b03b0d8e0d7564bf00074e88", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:05c660b9f583b6c7462489c25007205ecfbe9aa3c9ff0c66f861b31a31ab5937", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:6ed2471e8c1be4d9b5171076a67d21a8ba70f54aabaf56740fa2958e3bee2170", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1126eb06a64e6c1b6fe9e6ae561d973d23984be68048e7e9487c544eed989cd1", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:f6e4c2d68a7d45a52bcc3fce55357b8df34e94ebdeaff8031568ced324b3f7a9", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:64fbcf0e407584a9f8914cd73ff904d50570305ab96ce06f18c6046c59fc14e9", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ba33ee6e5e9e64f2a86de1bca071dea4dbc94018ea7fb8077b0c64fa267e1b5f", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:77d928d99c7cd52f8f88a8290b1d8bc78f36fc76ab4026820297f78068eb17d5", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:18d4b8297f0ce54de3052d5ca02f91d35e2d3a4c8add041ed8682d28696df8b2", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:c143f04835f3aed41f8d095e5ff0c857fa760379554f8e48472d0d56d22cad4e", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:d1b8c9c665167cb4e506cd925240eae787098bb1c6d64abf4a494732a43ebd58", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:6eeefd4d734aa2588043264f4816d2531188084a7ab5d44937c42ee4ecab2567", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:96df095ffe73721c95af843bde3cddc39af2d851b74bed0e53a62b1dcd82c949", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:29f1bf739d55572ace4e20cb93384dcfab29b264b67178aa2d7e6b66632ceb87", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:2a7aa16cb172062dd71b85d1fae181802d22192ee1b51c8d0697c12d8f54f739", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:993b50fef20e341fb47b681ba9ccec34694335a2a492f7380516ecb5f7ef2b96", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:cf6f24a4c40a03518ec13d32cb6a86ab7f8f3de50dbf7fe7793703bb7cd678d7", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:23e5e874930f99eed89377a1e37d1aa866133f7c8a95df35f43b1f98d4ae7215", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:b232ebe651f7bcd5060989bdd098352ec0c126c62067e0b7835ec0eec713c509", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:179085b34e9490f40b1302b920e4e5b712116c96d6c84442996b11a7216602f5", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:aee129d2252de01c2571cd553c516fec1a66b54716b9afbe6dc0754fa401cd30", "enabled": 1 } }, "munin": { "100": { "checksum": "sha256:49295d3bc00c801a3f1bdc89efc1e52628c515aa5c5f00a391e7e63b34155987", "enabled": 1 } }, "mysql": { "100": 
{ "checksum": "sha256:5c76192917b290649c6f290b6a9afa379a81c0a8c8923238891e4e1f1b1fc60f", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:eccf366bdb0405b4e8129790c9cf30be1187fc0aebce18c7c58caf2258581b8b", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:3585bfb199b90f263ed283507470e4565bf176a08cfa1a28d1a1e18db654ec5d", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:ee37295285e4a76f3982aa98f7eaa24d1e25b124f8b5ed549af8c9d5fa38f68c", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:11afd1c2471a2b039f2ba7b5ecddc364ea468159232affdc1ff86ba1c4b1c8f5", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:371ccd443169118eaf38413f16f48017814506394117555db67215cbd34cdecb", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:11101d7322ea3ca5481c6f9ea6abec91df4ab34a11f2400f1366a37b8eacffb4", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8ced3143a16798e0ccaedb9f6ac85b0cf848ae49f6058b2ef487aa7a0ade8f8e", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:bb009b1afc1b7c36ff8d7535654b176114f5d907f1d92d6d01c34687ea3739d3", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:39d86593c1c9ddb60dd9c3d22a75c483aa12767e4e1fd020074e45cfacec57dd", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:5f855b905681b5931f20f453505002599ac0fc5d7576fc090a1fcc39c7e64174", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:ca8d26c5f15e28f85203d9b49b4df94c04132a37375a14e363e9538dfab892af", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:e36f0b648311abae7af932818d11d8d703a5ef8cef680de9d40ae5dbb498687c", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:4cda6056e774445a69f90a0cb47b29cf22c5356386d1ee893e6696a479f316ff", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:2f558994e7fd94141c4986aa4018266380248521d482f0d2f1e1bb3d00bc34e0", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:b13c01745c249210d382385a9930f22553c8e038b3b2af1ba0744c64f0cd2f71", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:175ceb460d006f30dc01f7a08bb76496b1cf2cb26ecbae622b056937bd8a1c3f", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5ca7a227fdf1546c8f91e71a51633026bc037817fec9935ce3a882f3fb1873ec", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:674c362fdb4ff8c0b67e6e40a643a439e8b95bf38f4b334f78d72a56a7694e29", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:5d24b99ceb33fd103ce2b72f603cf3b8dc4c4803376067ce499beb80ebf99b57", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:0f7c29eb9cef6e8c708d36a9fd878a54e55b3f99edc84e206adca46f5cb7ee5d", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:afaf4b7423fb0b8795a1ef00fe60ca7b6dfee9dacbb0e1f3ca2570cadb5c8c47", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:05ee59e4f0ccc55dc56aafe8c50056389dafc36b55397cc5ca7c488efa4bd153", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:1f3f26e68904be82472e2ca9f007d2fb9978e16e707de6bae15c8e2cc3b02175", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:fd02933a56d91b9f6292067cc2e4ae12e08342eaf1cd338c267f580bbc15bb99", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:aa0f47d58ea25be2a2d931526f18332860ba6383f3cc9757a50777100d7a643a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:15bed90aeb71d059a12f3f224accb5f736cbdff0f23990a9e9453a10b828b4eb", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:ef67026f83fb95f32372e861fa63e26f390a800ddbb65df14ed64233a60fc528", "enabled": 1 } }, "openshift-origin": { "100": { 
"checksum": "sha256:0119b24ea332686efd7ae8508b82fd1d6500c5982c82571d9dbc096291ca1d11", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:bb512e88f449af921514c204c9ed8b91edbc1524e98e1ae36603f8efa7bdaf14", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:fe8362242a83ce98cc7ae47b8a66c2a4bfef75be1d7367c0ed91e31844f14949", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:7b0f82bc2b3eb92ef873c2a624df094884c6c876928624b18e18d1cd9d01ff20", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c9006dd1012aa5820d8675b1b73faa5b31b2fb8952f147a98d911c9763393b86", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:0e0e8eddfeb25c7022baaf99904ebd2cbd6ba01eb42d7ebd9840d786e869b39f", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:ecbc811a5d5b8e66256d2863df65daea26e69a3ff4aa73fa496d2430e43f11bf", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:7e9e74342123ade0c59725f75a7afbff56774de0e91b703e50a9b86a55242707", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:40451ecbb8fd5b06b4da2f2a9d503c239bb040cbb6c0b2021991d92f805b6937", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:7c9b075fee3fae924d3e19b64e1a5a24da83030c026bd68049d1102abc6e770f", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:7b7babee3f26582a6df33a6d361fd237ad97265e8ff456bdd0e1eb44e1e79186", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:9240d49113135375b534cb7c7c3c21766b941323b66e77d1cd297221d0924bc4", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:249e45e31c373f43d466cd48763605888b2abfccd699c81d8c88069ea7d671e5", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:171979e9e95f1cd5317b146c00774ea818e795d03e5fe27ff981bd8fca7d16ef", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:953c0dd74e32ec1f769cb5b0bbe0068f02268cbf38581447a61f8397a2d25e32", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:87ba0e153f3ba1c53ad2d01dcb1fd1494d269f361f6fac74ec1583fde9e97975", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:e464ea0265a2e7350502360b8af6f35b0fa4e35394ead4b0241ffead64949f04", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:97a89a6e75e7c01fbae15013b73cd450173fe35f70b1df239e40e2396e553baa", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:fa71a4e7689ab63bdf9cca9856ee4c109c1ccb5c7f1ac2ae00441dd4390bb91e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:070900ae540857df7541ffd17bfcb5f8ff77ba028c3048bc4542f38b29817907", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:0a0af5a5e3e6c16208b8a774ae4edd6a8ad35de509083946fe711f45cbf4bbae", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:72562d0e1206f101a8d84cde3f49f1797c623e0e78733d0dd2fc56419b3f7058", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6ffbb18539d4793fada646816357ec67ecec65b59f6b11f850cca8f37e0ae90f", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:4cef507f55529f8cc40b558a8451ac116d9925082fe522ff1f099b9356160014", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:06da4ff240ba8ca6dbce786ae353a57e85a6fe703949c96c95aaefaa2e4dda11", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:14affb00239005ad75b3bdad55f0568829f5e1ae7247161d82d318abb87e2dd4", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:b0d814f848bce797bc127d7bf7f76dbeee5df5ca20d6edf41c7d9b305a0808db", "enabled": 1 } }, "postgresql": 
{ "100": { "checksum": "sha256:eeed8b98eb64a4183391b7cfb1a84d5b7cb8c8369dadc25879f77c5be7b482d0", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:707cbe426d9d8c7e2f8c1c5fcc85ab4bcf8f25c675deeee02ebcb4a1dff2be47", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:86d3c2ea2b0b2215630226ad51be50dada7bb0d7b895c40a3f56ead55591acfe", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:07cd9caeee15b10cf40f9c65e10d4c7879f0de3aa5b6aa876cbf807673828ac5", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:caaeb30c9a1385611d5cf4063b0e81f470a05f01d2394a0b35d62b593d0112de", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:a89dcb4dea7902ccc18823b0646db59341385b6df10a840ec31d270e718f9506", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:d0f61ccc5e0efe1310f80f5f3dbaa81c90cef7c4e7c80bbdba34bd83d098c166", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:611b2ba2180645d5e6fc6b95d831fa10824baf36e9f6fb5f4b238498156c7190", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:42434c8d786a3dbcbdbcc89208e4395182574082b90b6d14a40bf365ca788b1d", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:81b03816b0c6eff95e8fdb1393de015c737d0bad01a752f0f78af9042b97fa1e", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:27e00e35cedb8928d19e0e18cc60d8a4adadab14b92ddf28b3a39bc5a9b0c6d9", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:04c2ec7c5c908e14d8cecb43a69be2e01a2a5d2e044803e1160cedb9bd18ebc9", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:f8f7574443b1eb598564b970f1257c93e10a4bb0ab4f2e497b78aa352806c3d8", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:aece743224943cbe2ed03b5134092e7c0650f703ce20cd4fe56c056c896b1d4b", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:8558f2885f81f626ee7f3bdbc7caff7ce69ebd700e35b4cbb9a74a11fca2c133", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:aa88fe777d904892b39c9a60f2f50c0a77a5f5bd5873b591a22eac6874a3238f", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:96b077e6ee179c3762f61152cbeb0a58a1bf1000d010ffd1ee7b78774e481150", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:9c48629fd51f96fba2cc7943369d9f16eb4389a0302776ce83ead122aedba511", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:8d131212c0226332b10bd4c345f980f2ef01f5a9577252d31669bb583f240d54", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:9360a29835c5ba1a6cd7327aabd73f635d21444c739bd114a38b89eeb4e32210", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:8e7c9b90073202f972f1eaa838da6cdd1b6ae21ffd16b05c31ac227e1ac5aad5", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:bfcbc5396ce78ace4b8f9d48c23223d8c5228a840c89926fa956f4f13df06e9c", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:2994283b036b32b4e64b5c3f532481cf415718e9d4c479b3ddfdc8a858e78118", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:05643e232c6b5365bafc75d9bf857e958077297615cafa49604fc088e10e8d86", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:392e11333d040576e8933ec780ed7921295ca92f9ca529c066de5029cc0f9bac", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:ded345f70432e58bcde53d4921b418cfb092fa897569401b80da018a54aa0da3", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:c34ec069349e0fcde2d4672643d8db4317e53b9ea0e51ac036d79b027e845e74", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:ce4a3f1422d7e9c411fc8bfe28b90bf9b66e3593ac0957c0c22f558536ef00dc", "enabled": 1 } }, "remotelogin": { "100": 
{ "checksum": "sha256:19bb471e6927d94eda8db5ab80a619b8c0b2fe87827030e94ce9b4577249b314", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:b14c8c21fe8ee2121e5817382e83fd2a25699836be8e79269026a2d2494718f4", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:604f153c5a11a110149ea907d6c3a8fdec8f812d8a4299bc107e47d4431f5d0b", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:ff0f6db55223b72e64312ee10b94a5b4a86ac44f2c123ee27a917dbe1e187ca0", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:64a0369f9b8474effd6e23726327009ac8728d77f0e3b9bf42b85b1a2d1b5763", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:fc41120cf629e139e30a493b7ad8d7a4f9b52b7f9cad155e5afb626cf3e1df29", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:1284680691e7027c5ab08b71ad4edd76f476cb3934b0e4cb83fc0769cdfd5816", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:32f6d325445c86094d8f4f861ec9430c5cca40c11a66c7905fb10a4fef5b85da", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:e13af8921501fa4e869a20c87963951145bc762c670fc1b996c4858ca430cc71", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:c9b5439b239ccdfd69266a8c756ada08b9cec67dbab9452c3d0a23c62800c9b2", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:415699214ad395ca4b5327d3fb214dcea004469fdc8cd42228c92ed3015d86c3", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:cf74a139dfff50fe96c532fd2f011d8f60d815745ac2f5ac95c4c47bd78715c7", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:572cb4b1205cdff477741bfdb35fecbe957b110aa2843de795de50877cdabc41", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:bc851a96ecc19e82d4d0b33ad662ca1c49e3425de81b4324d4aebde52dd5a17b", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:323777e53cddd3de4c55d0eedeb42a3990a27d71e58f6ad5f143eb5f1c7f9c75", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:25e1f5addd0d93b078b2f34fa6d1ca10cad53dd219248fe1c746e6d2bfa3544b", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:19c4a20b03e97e564e830221b99840103663837b6b250446f09a496cc2f7e560", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:03381789ba50d9f77db117a8902f09456020284330f49b0c4f62bfd09b0d3d5f", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:4a9130eb6fb632ff0de22ba9216b359a1be0f593ef9f136449a937c4677eae3d", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:439e9cbb532a369e861defce3aff2a82152498ffd5e6c2f00ca52ce91a526ed3", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:8464872de44b84f93dbaf1c899b7768a9ed26f9279271bcaaeb0ce5b733e83ae", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:ebc5032834ad38030e0f6b7dfc78e04f96a9313a6fc7748592aabc98826f91db", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:7f683dfd03a2e9ffc4dacde8964239adb6faf545cfb62521bb9bcf671c241de4", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:7a21fa964d09473eb836f8bb7e073ac9c346dd6b5995cd0a4396acf02510eb51", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:fac00ff771c3ad6dc291b05b907f69178f998fb36e1f9adf3714c86697c7a161", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:40d1eb0b28d21e5ef36616ad8e4786528e782991f0b7ee26ce0f1eff1c5d6094", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:e87d1b937a39e751a6b3294459e6788ae6c52847cef38962f489c64c29447ad9", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:f617fd3b543459f5d4b2aa932876efeeac51496a1da7fd7b2fa2beafe6bc2943", "enabled": 1 } }, "sambagui": { "100": { "checksum": 
"sha256:b11407e064027c75f286626c2009d88d9b5f90e4352038f7bf143744d44b910a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:df567fc4d933d9dea43e10e13b120ba77be68f4f44111db510827ff7d5aa773a", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:14b3ce427a4510613d52ed8996c11252bfd4c699fe419f59d0f8d01fa352ee0d", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:766335004d1a422d0e52b7df790affc33f3b8a1c41df4b1c1e9076da3ecb2d14", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:f830b5c0e0a610fb3a3b5a5c7060b007377e799baf151d4ac035da6940af0017", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:8ea825a62dfa32862de1d1efe92766e5d3032091bd3d1ac15719ceaa887395ee", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:04111d196f15e5e68df399887e38960656c279e376b392609021ce30a573d8a9", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:e64ee5c1aaf6bbe095e976ab4a48b70f7e5db8fe21c99ece4a513c289203047d", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:94f99134417edf1852d924b06de124bd848f6ffd1b087f8825236cbcd259c644", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:0add400128d772bf8ce0812f8aa300ef634feb8823a4059ceac50243effbfb4f", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:bd726f240b2c20abccd09a80e9275c5269a78d541563afeb1367fc8eccab53e7", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:c7928fa9fe6bad25ce503142bf164419bda4b2ecde96d7d7b276750b41dcd0fe", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:df09f8d23709092d641b045590da4aa41eaefc56d84c06279ff243ef74dfc029", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:bbfcd4ec68fa87b243b5c9089cf748e4b7b19c069a4b02a1235507aa7af0a6e5", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:075e270a22e227a7124131bd70523e73a4b9ffefea9d4f8d46273a262970196f", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:d443be9f15c80a37eb6827f2ff3c274bc1ef10526404a32401157f4b9be96c12", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:3a8bb3ed766ad233fe9869fa887ef16b2c6c505458e1879920962add2cfb5cdf", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:251d8e6bd3d7fe0d56adefc8d16199254f316a29933bb92d0da48838d641a084", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:42663bd3af6c64e89814c50f07e8a5fbf4a31a487d7ce94dd5fdee1e3781eddd", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:903eb396a96568f9d3b080bd0fe162810a44f834efcb2e2f61fba9e2fee1ae8d", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:ec65ee44da8241f3143480ffdfa949641dedd449219c3d82c904105ebe8768a3", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:422730aaaa5a4d0ab14be1e5eed77c4f3b3be74649795e1ab87e049c1d04bf21", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:2c6fc92c6020fc7719efbf8fe1f2cedfd6c4323a10ecefb5df1dcd3fd912db44", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:c69af5659fe541ee41e4c0769a0421fd5ce34e80fc6b10f5b940c5205f96649e", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:c31241c566fcf7d34b292eb3dfac06ead822130661772c73f099e639e7b5d661", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:af44f636dd29b36e60f3f79b1e2036fb2db53ff4117fd21afc73ac2c7f204fda", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:d8eecbd8ab122d8e9d5f475c265ad9bdf46cd155c497654b08e3b2cfc7f4e47c", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:71d81159fbf948ff16aef9dc2261fdd3358c37c0fd04b719be1e968323f4a0af", "enabled": 1 } }, "spamassassin": { "100": { 
"checksum": "sha256:44d59d271674c102103448503f9eaa2534ff54e37dd7db198920db9be1acbc24", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:6e051766c809f284e286e91bcbb0d8296169466a95d8c90bea85df209a75f9c5", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:4c24a9a58d7830981677a80b1e4714fd8565ed41ad62f27eb586e8edb6dc41fa", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:b4183fd8c64c1b9f6079d8befcabf75baaa12152c4178be20137950461381eee", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:f8f382fa879bf907576966b217252baee971c3842eb24b80ae63306ab3aa03dc", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:b14eb0b895c1a5763193389e7be368e543b779d74ee8dae434c3b7c5723f98a0", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:db4743c9da0a9478a8f04b5ca595c5ad49b06cd47f3be4c3a27b71caaaa608aa", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:53e3b393bc37eef1a265c5a9323bead2990cdd4c895ce5b9eb1040f16caf463b", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:4e352fc53e907847636a60c5744bd48ec07d56f628a429dcd463a2aa9069f791", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:2c9bf6d19dc034b84db5cd2596236ae85f325456c75a93cff599e617b4098f09", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:7ca6bf88e460bca92a976ab1e14e12dc760c99bc64ef2c2b99a38e5578e6b5b3", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:acf3579918d33fd5b0cf6606ff538b9b8d6ff32fab2ad77cecb8af4185d193c9", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:96450a3c93601c436df8bc5e30bd924fd7cf3f93b81ffa94cc891cbf2fec3cf5", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:b29dbc05d760f12f9b3e106fde09c36a95e050c2f17b1e97b463a752ecdac07d", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:a6303328ab9263a684a959d7778ec0100fc040470f633fc9f7c35d110150eadf", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:3c5e0b1c6ffefeab4d4794237a2ce974f11fa2485ebd22454c3973cb84267102", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:b8d26609375710605b0a2944e3c3c9d17916809ba216ca4903ba4fd3b60c673b", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:5479de080dd6738a9cceadebbdbb5cd881dc2de24dd345de9b6465cf11207966", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:6673fb9d330ce94708727db8c4459e8db6c70f2796f3eca6cd2965346150d960", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:7d4584d9ddfb6b43441fa5b6b826252ac849059d5d2467624fe6576b3befdcc5", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:18b872d0c5a157da487cf5b0cde78815902f79ab1403513e5d225a9da0969111", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:e747723f87e55aa77abd1bab3c84c87e165e56ab16690cb5e78717f6b8e7b0db", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:05e146a2aa2274d04de3407604d55f4cc16218e0e661966b1fc1a4ba37417279", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:0908b0458d33ea144904285546a4ffa2876c1fafb3c19045a410248b06860cc0", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:26a7c0d9b5edacbe701090f02c8da3b23ceee1764c599ea3997ef8a019572390", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:7de741394b99cfe157eacd30d7fe9421897aef2492df65c08c5129123b853c28", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:511654f4f974efb3b10787b6ca395d83d11dda568d770d9ba5dbb5a6e49e98f0", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:c7fec612a8f6eb93af9684bea09e5f45114f9ac644d0c0ebfb2247e475191672", "enabled": 1 } }, "thin": { "100": { "checksum": 
"sha256:4a8999a2f3e0124301324542ea87326c4cb464ce47af07a02328a8c87685768e", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:a397f26620d827447a79234cf8c5ba828b947be721cce03b6c2c7ac8ae4c7ed9", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:df99885fdc3e195f901bca4c5a5b8e69cd9bc5e4fee4b7ef576254c3116a708d", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:364cb483443bb6e994cd2bf4a445f7f6b26426ceef313d5f18a2d67705adb2e7", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:919a9f08b8611ad2b886ec54c89fc6dc4e05575b1536b2e9fe9ad8ae4450702f", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:e0aff1e9512fc9eed6abff9d2ee0970d13c4930734a8a4ad6c43a3bdcc2b7509", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:c5b1e90b2d0d0d6566ba350b3e3a7d531f928765009b1c10258d4c1b29dc52c1", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:05d754d9c917a917fba979172b7dcfab1ece9bdba6bebf94cb3bc9706ea74078", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:ca34bba129f1b1940c0d2f9c8e0b127f5ffc7693e15e29a76ffd36bed26fc03a", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:5371a568a87e7c6db0545fb25a34e981c4bee6d7fa79d77412aed9041534da82", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:ed6fcc69bf86c664bcca043868c15f9c18d83e2d6668580b8c4ef3b6f9786318", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:525635ad9c177a6542047b5ec23171de1c8c688e63079b88431c3292b5d5eade", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:07229f11e8eee7f3b9b2a0f907f4071b6bc23e94b47dacf54d2553b71027395c", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e08da772343770e22511e9b04b4e52b9f496373c253337bacabf04962253a968", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:92b299c67dd5843c6032eab38eb20be08463ccdf14a42b2809c2955412caf663", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:1375835666f94917993270b5b31df2bf38588f0fbd74fdecfbda753bc83f7bf7", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ff129bad17f3e5f3f5e9b6d3d6838a6e89aa1e8e072396882e40603647029237", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:294711e55ab39c66e1a8dfcedc1ff3ee8cfc480bece6d8905dcda119d06bf3b5", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:8127ec95ce1e371db1630b8502f43e0f5efb7582d4e9a18a332b278bb2cfda85", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:a3c8f7843cd7b310d33e21349f2f95767f26dea37fd14c6c8b88428e2940a0dd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:6b03ca17e83e58108284e2e92cc0a3c05405e2fe73b1facb5246c42db33e99dc", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:96dc020ee16db478ef13f9ac3ce1d62732019c1f9502a6f5db239454ea70c83f", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:d9ec02335d573a34d3d76a19c2e71ecfafbd5e7f4f72fd8bc17efb16f89a5f83", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:5ba7c1ae6aa7f58222ea8d6945fce40b7fd1017af834f1ce29ff29738c34121a", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:ff60251151dcc519f6fb44040f63e5debd899d8d4a069c3c6d643fd52a0744be", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:c0bc362be2090a448ca8bda9e653e300ae2af9473c8cd889cfd2b9f9e794b31d", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:133ee1df94f11b9cbfef1f7321948784714b6c75eed48aef6f1408a0e6f86a43", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:f53b16670b35a8bb5093efef5e94310c138da3aa0e51a8a77c13ce09d1e9ea67", "enabled": 1 } }, "virt": { "100": { 
"checksum": "sha256:ef2430b54b610a1554b5193ecabfb41847e06358e2a2ce133cf65837e04b8726", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:164431e9599b4a92e479cb41c081b291e6ced3e00965b2599967d6d009f21668", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:b12384d89327b5e5d1a0cb99ab744b734c054a926afbd5c4b37b2a7dd46c1f32", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:a0c04d01ed55e10320bba1f42677cd889839091a0d8bc5a07e2d9d649570bd25", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:5448a42b95dc894a7c3731f89ec0c2f74ff1ca0940fe5a910b774968f1fe0afd", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:49b5d8c0efbb628586af7cad5e56d4dfbbde9de85067eeebfb3e77a8a098c2da", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:0b8558bc624ea6c226cf19ce8cb5caefaa0a6c9f47b4ac222cb639a5c087ecd3", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:19a4e9084136c92bebf7453d9a000ab2f8c7b3923d3e9555821c7f7e7c74823f", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:e99c54d578f3081319a92793ae32b42a633ab6ff441793d04a5561cb836aac79", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:48d77778810934535b9ea3b73eed355f610fea7e097afa1f715403b9153ec646", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:fffcecff5763f2432a48de987f9068144fd5b7e1e3d39a915df5252ccbd09c54", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:b5991add5f97fea260b00b5641356b2e0a8f8ac7cb38fb096fa0fa12257fda40", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:70b1f5282177eab34f2bf0b9ccd3d9e670db39b0e236db626d007142746f3de8", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:78a26c414b95f847ace2244ffd8cf2b331d0deb3dbf997b4418c04776bb286a9", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:416831481b889e80b383b31faa00f231b9783d0df2fad53b1586e93107531be3", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:998e50b2675e968844d88dcd800430d6321c802fc0c95e164ae42639702f3cb6", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:8379859f95c1c920f9980a7da51b11000106746567cf39eb11e991e630dc83b9", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:3a40a0b982db692f743fad699420776c4dfb330d1b9c7ec002539075fb499aba", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:7fafc6a9f66dbd5fa664670416e8992743834c2bdc87e8baa413349dfe59972a", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:c836cbc2f01e3b3e9b3ab2a228d66fd3397b33f915a0ec8558de6b083be6181b", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:84c955a978b33a5e0790ce78ea09a45aa35fd6604b49a975fda7c037ad1deba4", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:bd9f7634df97c85de82c4c6b2cfc420e24090f117898f9a89d4d930ee6757e4c", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Monday 18 November 2024 10:09:56 -0500 (0:00:04.724) 0:00:53.504 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Monday 18 November 2024 10:09:56 -0500 (0:00:00.083) 0:00:53.587 ******* skipping: [managed-node1] => { "changed": false, 
"skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Monday 18 November 2024 10:09:56 -0500 (0:00:00.065) 0:00:53.652 ******* skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Install cluster packages] ********* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:58 Monday 18 November 2024 10:09:56 -0500 (0:00:00.104) 0:00:53.757 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute fence-virt authkey] **** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:64 Monday 18 November 2024 10:09:56 -0500 (0:00:00.101) 0:00:53.858 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure SBD] ******************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:69 Monday 18 November 2024 10:09:56 -0500 (0:00:00.098) 0:00:53.957 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure corosync] *************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:72 Monday 18 November 2024 10:09:56 -0500 (0:00:00.093) 0:00:54.050 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Cluster auth] ********************* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:75 Monday 18 November 2024 10:09:56 -0500 (0:00:00.095) 0:00:54.146 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute cluster shared keys] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:80 Monday 18 November 2024 10:09:56 -0500 (0:00:00.147) 0:00:54.293 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Enable or disable cluster services on boot] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:86 Monday 18 November 2024 10:09:57 -0500 (0:00:00.109) 0:00:54.403 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Start the cluster and reload corosync.conf] *** task path: 
/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:89 Monday 18 November 2024 10:09:57 -0500 (0:00:00.123) 0:00:54.526 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Create and push CIB] ************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:92 Monday 18 November 2024 10:09:57 -0500 (0:00:00.096) 0:00:54.623 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "ha_cluster_cluster_present | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] ***** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:101 Monday 18 November 2024 10:09:57 -0500 (0:00:00.102) 0:00:54.725 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-destroy-pcs-0.10.yml for managed-node1 TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] ***** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-destroy-pcs-0.10.yml:9 Monday 18 November 2024 10:09:57 -0500 (0:00:00.117) 0:00:54.843 ******* changed: [managed-node1] => (item=/etc/corosync/corosync.conf) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "pcs", "cluster", "destroy" ], "delta": "0:00:04.310003", "end": "2024-11-18 10:10:02.234019", "item": "/etc/corosync/corosync.conf", "rc": 0, "start": "2024-11-18 10:09:57.924016" } STDERR: Shutting down pacemaker/corosync services... Killing any remaining services... Removing all cluster configuration files... 
ok: [managed-node1] => (item=/var/lib/pacemaker/cib/cib.xml) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "pcs", "cluster", "destroy" ], "delta": null, "end": null, "item": "/var/lib/pacemaker/cib/cib.xml", "rc": 0, "start": null } STDOUT: skipped, since /var/lib/pacemaker/cib/cib.xml does not exist MSG: Did not run command since '/var/lib/pacemaker/cib/cib.xml' does not exist TASK [fedora.linux_system_roles.ha_cluster : Remove fence-virt authkey] ******** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:104 Monday 18 November 2024 10:10:02 -0500 (0:00:05.213) 0:01:00.056 ******* changed: [managed-node1] => { "changed": true, "path": "/etc/cluster/fence_xvm.key", "state": "absent" } TASK [fedora.linux_system_roles.ha_cluster : Configure qnetd] ****************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:109 Monday 18 November 2024 10:10:03 -0500 (0:00:00.486) 0:01:00.543 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml for managed-node1 TASK [fedora.linux_system_roles.ha_cluster : Remove qnetd configuration] ******* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:3 Monday 18 November 2024 10:10:03 -0500 (0:00:00.096) 0:01:00.639 ******* skipping: [managed-node1] => { "changed": false, "false_condition": "( not ha_cluster_qnetd.present | d(false) or ha_cluster_qnetd.regenerate_keys | d(false) )", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Setup qnetd] ********************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:16 Monday 18 November 2024 10:10:03 -0500 (0:00:00.099) 0:01:00.739 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "pcs", "--start", "--", "qdevice", "setup", "model", "net" ], "delta": "0:00:01.456650", "end": "2024-11-18 10:10:05.312053", "failed_when_result": false, "rc": 0, "start": "2024-11-18 10:10:03.855403" } STDERR: Quorum device 'net' initialized Starting quorum device... 
quorum device started TASK [fedora.linux_system_roles.ha_cluster : Enable or disable qnetd service on boot] *** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:26 Monday 18 November 2024 10:10:05 -0500 (0:00:01.981) 0:01:02.720 ******* changed: [managed-node1] => { "changed": true, "enabled": true, "name": "corosync-qnetd", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestamp": "Mon 2024-11-18 10:10:05 EST", "ActiveEnterTimestampMonotonic": "1323271326", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target network-online.target sysinit.target -.mount systemd-journald.socket system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2024-11-18 10:10:05 EST", "AssertTimestampMonotonic": "1323230230", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "34664000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanClean": "runtime", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2024-11-18 10:10:05 EST", "ConditionTimestampMonotonic": "1323230187", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/corosync-qnetd.service", "ControlGroupId": "9584", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Corosync Qdevice Network daemon", "DevicePolicy": "auto", "Documentation": "man:corosync-qnetd", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/corosync-qnetd (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "81673", "ExecMainStartTimestamp": "Mon 2024-11-18 10:10:05 EST", "ExecMainStartTimestampMonotonic": "1323235119", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/corosync-qnetd ; argv[]=/usr/bin/corosync-qnetd -f $COROSYNC_QNETD_OPTIONS ; ignore_errors=no ; start_time=[Mon 2024-11-18 10:10:05 EST] ; stop_time=[n/a] ; pid=81673 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/corosync-qnetd ; argv[]=/usr/bin/corosync-qnetd -f $COROSYNC_QNETD_OPTIONS ; flags= ; start_time=[Mon 2024-11-18 10:10:05 EST] ; stop_time=[n/a] ; pid=81673 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/corosync-qnetd.service", "FreezerState": "running", "GID": "993", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "corosync-qnetd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Mon 2024-11-18 10:10:05 EST", "InactiveExitTimestampMonotonic": "1323235483", "InvocationID": "edce4760008945678a762abb5a324b8a", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14750", "LimitNPROCSoft": "14750", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14750", "LimitSIGPENDINGSoft": "14750", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "81673", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "6672384", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "corosync-qnetd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": 
"no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target network-online.target -.mount system.slice", "RequiresMountsFor": "/run/corosync-qnetd", "Restart": "on-abnormal", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectory": "corosync-qnetd", "RuntimeDirectoryMode": "0770", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "18446744073709551615", "StartupMemoryLow": "0", "StartupMemoryMax": "18446744073709551615", "StartupMemorySwapMax": "18446744073709551615", "StartupMemoryZSwapMax": "18446744073709551615", "StateChangeTimestamp": "Mon 2024-11-18 10:10:05 EST", "StateChangeTimestampMonotonic": "1323271326", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "4425", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "994", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "User": "coroqnetd", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [Get services status] ***************************************************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:36 Monday 18 November 2024 10:10:06 -0500 (0:00:01.160) 0:01:03.881 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "canberra-system-bootup.service": { "name": "canberra-system-bootup.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown-reboot.service": { "name": "canberra-system-shutdown-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown.service": { "name": "canberra-system-shutdown.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qdevice.service": { "name": "corosync-qdevice.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", "state": "running", "status": "enabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "geoclue.service": { "name": "geoclue.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"low-memory-monitor.service": { "name": "low-memory-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": 
"nftables.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rtkit-daemon.service": { "name": "rtkit-daemon.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sbd.service": { "name": "sbd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "sbd_remote.service": { "name": "sbd_remote.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": 
"sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": 
{ "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "upower.service": { "name": "upower.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [Check services status] *************************************************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:39 Monday 18 November 2024 10:10:09 -0500 (0:00:02.755) 0:01:06.636 ******* ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Check firewall and selinux state] **************************************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:45 Monday 18 November 2024 10:10:09 -0500 (0:00:00.063) 0:01:06.700 ******* included: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tasks/check_firewall_selinux.yml for managed-node1 TASK [Check firewall service status] ******************************************* task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tasks/check_firewall_selinux.yml:6 Monday 18 November 2024 10:10:09 -0500 (0:00:00.112) 0:01:06.812 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "firewall-cmd", "--list-services" ], "delta": "0:00:00.263210", "end": "2024-11-18 10:10:10.167491", "failed_when_result": false, "rc": 0, "start": "2024-11-18 10:10:09.904281" } STDOUT: dhcpv6-client high-availability mdns ssh TASK [Check firewall port status] ********************************************** task path: 
/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tasks/check_firewall_selinux.yml:12 Monday 18 November 2024 10:10:10 -0500 (0:00:00.768) 0:01:07.581 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "firewall-cmd", "--list-ports" ], "delta": "0:00:00.268292", "end": "2024-11-18 10:10:10.942869", "failed_when_result": false, "rc": 0, "start": "2024-11-18 10:10:10.674577" } STDOUT: 1229/tcp TASK [Get associated selinux ports] ******************************************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tasks/check_firewall_selinux.yml:25 Monday 18 November 2024 10:10:11 -0500 (0:00:00.787) 0:01:08.368 ******* ok: [managed-node1] => { "changed": false, "cmd": "set -euo pipefail\nfirewall-cmd --info-service=high-availability | egrep \" +ports: +\" | sed -e \"s/ *ports: //\"", "delta": "0:00:00.263113", "end": "2024-11-18 10:10:11.769754", "rc": 0, "start": "2024-11-18 10:10:11.506641" } STDOUT: 2224/tcp 3121/tcp 5403/tcp 5404/udp 5405-5412/udp 9929/tcp 9929/udp 21064/tcp STDERR: egrep: warning: egrep is obsolescent; using grep -E TASK [Check associated selinux ports] ****************************************** task path: /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tasks/check_firewall_selinux.yml:33 Monday 18 November 2024 10:10:11 -0500 (0:00:00.814) 0:01:09.182 ******* ok: [managed-node1] => (item=2224/tcp) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"2224\" | grep \"tcp\"", "delta": "0:00:00.877409", "end": "2024-11-18 10:10:13.220862", "item": "2224/tcp", "rc": 0, "start": "2024-11-18 10:10:12.343453" } STDOUT: cluster_port_t tcp 21064, 9929, 5403, 3121, 2224, 1229, 5149, 40040, 50006-50008 ok: [managed-node1] => (item=3121/tcp) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"3121\" | grep \"tcp\"", "delta": "0:00:00.423825", "end": "2024-11-18 10:10:14.204303", "item": "3121/tcp", "rc": 0, "start": "2024-11-18 10:10:13.780478" } STDOUT: cluster_port_t tcp 21064, 9929, 5403, 3121, 2224, 1229, 5149, 40040, 50006-50008 ok: [managed-node1] => (item=5403/tcp) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"5403\" | grep \"tcp\"", "delta": "0:00:00.424360", "end": "2024-11-18 10:10:15.113930", "item": "5403/tcp", "rc": 0, "start": "2024-11-18 10:10:14.689570" } STDOUT: cluster_port_t tcp 21064, 9929, 5403, 3121, 2224, 1229, 5149, 40040, 50006-50008 ok: [managed-node1] => (item=5404/udp) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"5404\" | grep \"udp\"", "delta": "0:00:00.428626", "end": "2024-11-18 10:10:16.023934", "item": "5404/udp", "rc": 0, "start": "2024-11-18 10:10:15.595308" } STDOUT: cluster_port_t udp 9929, 5405-5412, 5404, 5149, 50006-50008 ok: [managed-node1] => (item=5405-5412/udp) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"5405-5412\" | grep \"udp\"", "delta": "0:00:00.423554", "end": "2024-11-18 10:10:16.887978", "item": "5405-5412/udp", "rc": 0, "start": "2024-11-18 10:10:16.464424" } STDOUT: cluster_port_t udp 9929, 5405-5412, 5404, 5149, 50006-50008 ok: [managed-node1] => (item=9929/tcp) => { 
"ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"9929\" | grep \"tcp\"", "delta": "0:00:00.424295", "end": "2024-11-18 10:10:17.781240", "item": "9929/tcp", "rc": 0, "start": "2024-11-18 10:10:17.356945" } STDOUT: cluster_port_t tcp 21064, 9929, 5403, 3121, 2224, 1229, 5149, 40040, 50006-50008 ok: [managed-node1] => (item=9929/udp) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"9929\" | grep \"udp\"", "delta": "0:00:00.424778", "end": "2024-11-18 10:10:18.652459", "item": "9929/udp", "rc": 0, "start": "2024-11-18 10:10:18.227681" } STDOUT: cluster_port_t udp 9929, 5405-5412, 5404, 5149, 50006-50008 ok: [managed-node1] => (item=21064/tcp) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -euo pipefail\nsudo semanage port --list | grep cluster_port_t | grep \"21064\" | grep \"tcp\"", "delta": "0:00:00.425945", "end": "2024-11-18 10:10:19.508918", "item": "21064/tcp", "rc": 0, "start": "2024-11-18 10:10:19.082973" } STDOUT: cluster_port_t tcp 21064, 9929, 5403, 3121, 2224, 1229, 5149, 40040, 50006-50008 PLAY RECAP ********************************************************************* managed-node1 : ok=76 changed=8 unreachable=0 failed=0 skipped=68 rescued=0 ignored=0 Monday 18 November 2024 10:10:19 -0500 (0:00:07.704) 0:01:16.887 ******* =============================================================================== Check associated selinux ports ------------------------------------------ 7.70s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tasks/check_firewall_selinux.yml:33 fedora.linux_system_roles.selinux : Set an SELinux label on a port ------ 6.41s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 fedora.linux_system_roles.ha_cluster : Remove cluster configuration ----- 5.21s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-destroy-pcs-0.10.yml:9 fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 4.72s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 fedora.linux_system_roles.ha_cluster : Install role essential packages --- 3.67s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11 fedora.linux_system_roles.firewall : Install firewalld ------------------ 3.52s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 fedora.linux_system_roles.ha_cluster : Populate service facts ----------- 2.79s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3 Get services status ----------------------------------------------------- 2.76s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:36 fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot --- 2.48s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88 fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed --- 2.23s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9 fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 2.08s 
/tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.08s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 fedora.linux_system_roles.ha_cluster : Setup qnetd ---------------------- 1.98s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:16 fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.94s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Gathering Facts --------------------------------------------------------- 1.61s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/tests/ha_cluster/tests_qnetd.yml:9 fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities ----------- 1.41s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141 fedora.linux_system_roles.ha_cluster : Fetch pcsd capabilities ---------- 1.38s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:155 fedora.linux_system_roles.ha_cluster : Stop pcsd ------------------------ 1.23s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:6 fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.20s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 fedora.linux_system_roles.firewall : Unmask firewalld service ----------- 1.18s /tmp/collections-o6g/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
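
The large corosync-qnetd.service property dump near the top of this section (FragmentPath, MainPID, SubState, UnitFileState and so on) is the kind of status dictionary that ansible.builtin.systemd returns when it manages a unit. A minimal sketch of a task of that shape, with the unit name taken from the log and the task and register names purely hypothetical (the task that actually produced the dump belongs to the ha_cluster role and is not reproduced here):

- name: Manage the qnetd service   # hypothetical task name
  ansible.builtin.systemd:
    name: corosync-qnetd.service
    state: started
  register: __qnetd_unit

# __qnetd_unit.status would then hold the systemd properties shown in the log,
# for example __qnetd_unit.status.SubState == "running".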
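
The "Get services status" and "Check services status" tasks gather service facts and then assert on them; in this run the facts show corosync-qnetd.service as running and enabled, and the assertion passes. A minimal sketch of that pattern, with the assertion conditions inferred from the logged facts rather than copied from tests_qnetd.yml:

- name: Get services status
  ansible.builtin.service_facts:

- name: Check services status
  ansible.builtin.assert:
    that:
      # values observed in the facts dump of this run
      - ansible_facts.services['corosync-qnetd.service'].state == 'running'
      - ansible_facts.services['corosync-qnetd.service'].status == 'enabled'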
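
The firewall checks run firewall-cmd directly and treat a non-zero exit as non-fatal (failed_when_result is false in the output). A sketch reconstructed from the logged commands, with the register names and the final assert being assumptions (the real tasks live in tests/ha_cluster/tasks/check_firewall_selinux.yml and may differ):

- name: Check firewall service status
  ansible.builtin.command: firewall-cmd --list-services
  register: __fw_services
  changed_when: false
  failed_when: false

- name: Check firewall port status
  ansible.builtin.command: firewall-cmd --list-ports
  register: __fw_ports
  changed_when: false
  failed_when: false

- name: Verify expected firewall configuration   # hypothetical follow-up assert
  ansible.builtin.assert:
    that:
      - "'high-availability' in __fw_services.stdout"
      - "'1229/tcp' in __fw_ports.stdout"

In this run the allowed services are "dhcpv6-client high-availability mdns ssh" and the extra open port is 1229/tcp, matching the STDOUT above.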
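
The SELinux check derives the port list from the firewalld high-availability service definition and then verifies that each port/protocol pair carries the cluster_port_t label. A sketch of that loop, using grep -E instead of the obsolescent egrep the log warns about; the register name and the item-splitting logic are assumptions, not the role's actual code:

- name: Get associated selinux ports
  ansible.builtin.shell: |
    set -euo pipefail
    firewall-cmd --info-service=high-availability | grep -E " +ports: +" | sed -e "s/ *ports: //"
  register: __ha_ports
  changed_when: false

- name: Check associated selinux ports
  ansible.builtin.shell: |
    set -euo pipefail
    semanage port --list | grep cluster_port_t | grep "{{ item.split('/')[0] }}" | grep "{{ item.split('/')[1] }}"
  loop: "{{ __ha_ports.stdout.split() }}"
  changed_when: false

Each loop item is a port/protocol pair such as 2224/tcp or 5405-5412/udp, which matches the item values and semanage output seen above.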