ansible-playbook [core 2.17.6] config file = None configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /usr/local/lib/python3.12/site-packages/ansible ansible collection location = /tmp/collections-jt1 executable location = /usr/local/bin/ansible-playbook python version = 3.12.6 (main, Sep 9 2024, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-2)] (/usr/bin/python3.12) jinja version = 3.1.4 libyaml = True No config file found; using defaults running playbook inside collection fedora.linux_system_roles statically imported: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml statically imported: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_verify_auth.yml ************************************************ 2 plays in /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:5 Tuesday 19 November 2024 14:37:03 -0500 (0:00:00.008) 0:00:00.008 ****** ok: [managed-node1] => { "ansible_facts": { "pcptest_pw": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n" } }, "ansible_included_var_files": [ "/tmp/metrics-EWf/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Test authentication] ***************************************************** TASK [Gathering Facts] ********************************************************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:9 Tuesday 19 November 2024 14:37:03 -0500 (0:00:00.017) 0:00:00.025 ****** [WARNING]: Platform linux on host managed-node1 is using the discovered Python interpreter at /usr/bin/python3.9, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible- core/2.17/reference_appendices/interpreter_discovery.html for more information. 
ok: [managed-node1] TASK [Stop test] *************************************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:20 Tuesday 19 November 2024 14:37:04 -0500 (0:00:01.028) 0:00:01.054 ****** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [Get initial state of services] ******************************************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Tuesday 19 November 2024 14:37:04 -0500 (0:00:00.040) 0:00:01.094 ****** ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" 
}, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": 
"systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", 
"state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run role] **************************************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:30 Tuesday 19 November 2024 14:37:06 -0500 (0:00:01.759) 0:00:02.854 ****** included: fedora.linux_system_roles.metrics for managed-node1 TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.065) 0:00:02.920 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:8 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.022) 0:00:02.942 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:13 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.022) 0:00:02.964 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.018) 0:00:02.983 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.018) 0:00:03.001 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.019) 0:00:03.020 ****** ok: [managed-node1] => { "ansible_facts": { "__metrics_accounts": [ { "saslpassword": "t;dlen;dle", "sasluser": "pcptest", "user": "pcptest" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:35 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.052) 0:00:03.073 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] 
******************************************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:50 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.018) 0:00:03.092 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Configure Postfix metrics.] ********************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:58 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.018) 0:00:03.110 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup bpftrace metrics.] ************************************************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:66 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.018) 0:00:03.128 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric querying service.] ****************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:75 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.018) 0:00:03.147 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_query_service | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric collection service.] **************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:81 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.017) 0:00:03.164 ****** included: fedora.linux_system_roles.private_metrics_subrole_pcp for managed-node1 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set platform/version specific variables] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:4 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.067) 0:00:03.232 ****** ok: [managed-node1] => (item=/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml) => { "ansible_facts": {}, "ansible_included_var_files": [ "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" } ok: [managed-node1] => (item=/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml) => { "ansible_facts": { "__pcp_pmcd_defaults_path": "/etc/sysconfig/pmcd", "__pcp_pmlogger_defaults_path": "/etc/sysconfig/pmlogger", "__pcp_pmlogger_timers_path": "/etc/sysconfig/pmlogger_timers", "__pcp_pmproxy_defaults_path": "/etc/sysconfig/pmproxy" }, "ansible_included_var_files": [ "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" } skipping: 
[managed-node1] => (item=/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" } ok: [managed-node1] => (item=/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_9.yml" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if system is ostree] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:18 Tuesday 19 November 2024 14:37:06 -0500 (0:00:00.055) 0:00:03.287 ****** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set flag to indicate system is ostree] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:23 Tuesday 19 November 2024 14:37:07 -0500 (0:00:00.449) 0:00:03.736 ****** ok: [managed-node1] => { "ansible_facts": { "__ansible_pcp_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:27 Tuesday 19 November 2024 14:37:07 -0500 (0:00:00.043) 0:00:03.779 ****** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:33 Tuesday 19 November 2024 14:37:08 -0500 (0:00:01.484) 0:00:05.264 ****** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmcd] **** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:42 Tuesday 19 November 2024 14:37:10 -0500 (0:00:01.270) 
0:00:06.535 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml for managed-node1 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : List optional metric collection agents to be enabled] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:4 Tuesday 19 November 2024 14:37:10 -0500 (0:00:00.038) 0:00:06.573 ****** skipping: [managed-node1] => { "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Extract metric collection configuration file content] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:9 Tuesday 19 November 2024 14:37:10 -0500 (0:00:00.018) 0:00:06.592 ****** ok: [managed-node1] => { "changed": false, "cmd": [ "cat", "/etc/pcp/pmcd/pmcd.conf" ], "delta": "0:00:00.003693", "end": "2024-11-19 14:37:10.488457", "rc": 0, "start": "2024-11-19 14:37:10.484764" } STDOUT: # # Name Id IPC IPC Params File/Cmd # Performance Metrics Domain Specifications # This file is automatically generated during the build root 1 pipe binary /var/lib/pcp/pmdas/root/pmdaroot pmcd 2 dso pmcd_init /var/lib/pcp/pmdas/pmcd/pmda_pmcd.so proc 3 pipe binary /var/lib/pcp/pmdas/proc/pmdaproc -d 3 pmproxy 4 dso pmproxy_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so xfs 11 pipe binary /var/lib/pcp/pmdas/xfs/pmdaxfs -d 11 linux 60 pipe binary /var/lib/pcp/pmdas/linux/pmdalinux nfsclient 62 pipe binary /usr/bin/python3 /var/lib/pcp/pmdas/nfsclient/pmdanfsclient.python mmv 70 dso mmv_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so kvm 95 pipe binary /var/lib/pcp/pmdas/kvm/pmdakvm -d 95 jbd2 122 dso jbd2_init /var/lib/pcp/pmdas/jbd2/pmda_jbd2.so dm 129 pipe binary /var/lib/pcp/pmdas/dm/pmdadm -d 129 openmetrics 144 pipe binary notready /usr/bin/python3 /var/lib/pcp/pmdas/openmetrics/pmdaopenmetrics.python [access] disallow ".*" : store; disallow ":*" : store; allow "local:*" : all; TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure optional metric collection agents are enabled] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:14 Tuesday 19 November 2024 14:37:10 -0500 (0:00:00.424) 0:00:07.016 ****** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure explicit metric label path exists] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:23 Tuesday 19 November 2024 14:37:10 -0500 (0:00:00.018) 0:00:07.035 ****** ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels", "secontext": "system_u:object_r:etc_t:s0", "size": 45, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure implicit metric label path exists] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:31 Tuesday 19 November 2024 14:37:11 -0500 (0:00:00.506) 0:00:07.542 ****** ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels/optional", "secontext": "system_u:object_r:etc_t:s0", "size": 29, "state": "directory", "uid": 0 } 
TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any explicit metric labels are configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:39 Tuesday 19 November 2024 14:37:11 -0500 (0:00:00.380) 0:00:07.922 ****** ok: [managed-node1] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any implicit metric labels are configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:46 Tuesday 19 November 2024 14:37:12 -0500 (0:00:00.644) 0:00:08.567 ****** ok: [managed-node1] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/optional/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/optional/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:53 Tuesday 19 November 2024 14:37:12 -0500 (0:00:00.599) 0:00:09.167 ****** ok: [managed-node1] => { "changed": false, "checksum": "7518789c091387cd9c322e1a8fa8aad21d4efbd3", "dest": "/etc/sysconfig/pmcd", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmcd", "secontext": "system_u:object_r:etc_t:s0", "size": 1627, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector system accounts are configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:60 Tuesday 19 November 2024 14:37:13 -0500 (0:00:00.594) 0:00:09.761 ****** changed: [managed-node1] => (item={'user': 'pcptest', 'sasluser': 'pcptest', 'saslpassword': 't;dlen;dle'}) => { "ansible_loop_var": "item", "changed": true, "comment": "", "create_home": true, "group": 991, "home": "/home/pcptest", "item": { "saslpassword": "t;dlen;dle", "sasluser": "pcptest", "user": "pcptest" }, "name": "pcptest", "shell": "/bin/bash", "state": "present", "system": true, "uid": 991 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector SASL accounts are configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:68 Tuesday 19 November 2024 14:37:13 -0500 (0:00:00.622) 0:00:10.384 ****** ok: [managed-node1] => (item={'user': 'pcptest', 'sasluser': 'pcptest', 'saslpassword': 't;dlen;dle'}) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -eu\nif set -o | grep -q pipefail; then\n set -o pipefail # pipefail not supported on debian, some ubuntu\nfi\nif ! 
sasldblistusers2 -f \"/etc/pcp/passwd.db\" | grep -q \"^pcptest@\"; then\n echo \"Creating new pcptest user in /etc/pcp/passwd.db\"\n echo \"t;dlen;dle\" | saslpasswd2 -a pmcd \"pcptest\"\n chown root:pcp \"/etc/pcp/passwd.db\"\n chmod 640 \"/etc/pcp/passwd.db\"\nfi\n", "delta": "0:00:00.013805", "end": "2024-11-19 14:37:14.213028", "item": { "saslpassword": "t;dlen;dle", "sasluser": "pcptest", "user": "pcptest" }, "rc": 0, "start": "2024-11-19 14:37:14.199223" } STDOUT: Creating new pcptest user in /etc/pcp/passwd.db TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector authentication is configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:86 Tuesday 19 November 2024 14:37:14 -0500 (0:00:00.360) 0:00:10.744 ****** ok: [managed-node1] => { "changed": false, "checksum": "615d2de55ab86108da0c7e6b64988fecb4169771", "dest": "/etc/sasl2/pmcd.conf", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sasl2/pmcd.conf", "secontext": "system_u:object_r:etc_t:s0", "size": 998, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmcd restart if needed] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:94 Tuesday 19 November 2024 14:37:14 -0500 (0:00:00.601) 0:00:11.345 ****** ok: [managed-node1] => { "ansible_facts": { "__pcp_restart_pmcd": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Report performance metric collector restart state] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:99 Tuesday 19 November 2024 14:37:14 -0500 (0:00:00.041) 0:00:11.387 ****** ok: [managed-node1] => {} MSG: ['optional_agents: False', 'explicit_labels: False', 'implicit_labels: False', 'defaults_config: False', 'authentication: False', 'restart_pmcd: False'] TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is running and enabled on boot] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:110 Tuesday 19 November 2024 14:37:14 -0500 (0:00:00.037) 0:00:11.424 ****** ok: [managed-node1] => { "changed": false, "enabled": true, "name": "pmcd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2024-11-19 14:36:08 EST", "ActiveEnterTimestampMonotonic": "377714498", "ActiveExitTimestamp": "Tue 2024-11-19 14:36:07 EST", "ActiveExitTimestampMonotonic": "376808808", "ActiveState": "active", "After": "system.slice systemd-journald.socket avahi-daemon.service basic.target network-online.target sysinit.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:36:08 EST", "AssertTimestampMonotonic": "377534516", "Before": "pmie.service shutdown.target multi-user.target zabbix-agent.service pmproxy.service pmlogger.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "4371667000", "CPUWeight": "[not set]", 
"CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:36:08 EST", "ConditionTimestampMonotonic": "377534513", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlGroupId": "4525", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "\"man:pmcd(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "18189", "ExecMainStartTimestamp": "Tue 2024-11-19 14:36:08 EST", "ExecMainStartTimestampMonotonic": "377714462", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:36:08 EST] ; stop_time=[n/a] ; pid=18115 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; flags= ; start_time=[Tue 2024-11-19 14:36:08 EST] ; stop_time=[n/a] ; pid=18115 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:36:08 EST", "InactiveEnterTimestampMonotonic": "377533865", "InactiveExitTimestamp": "Tue 2024-11-19 14:36:08 EST", "InactiveExitTimestampMonotonic": "377540440", "InvocationID": "0707949867a641c28a7e04227a9f83b6", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", 
"LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "18189", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "41955328", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmcd.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:36:08 EST", "StateChangeTimestampMonotonic": "377714498", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "9", "TasksMax": 
"22329", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target pmie.service pmlogger.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is restarted and enabled on boot] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:117 Tuesday 19 November 2024 14:37:15 -0500 (0:00:00.758) 0:00:12.183 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__pcp_restart_pmcd | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmie] **** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:45 Tuesday 19 November 2024 14:37:15 -0500 (0:00:00.018) 0:00:12.202 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml for managed-node1 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4 Tuesday 19 November 2024 14:37:15 -0500 (0:00:00.042) 0:00:12.244 ****** ok: [managed-node1] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/network", "secontext": "system_u:object_r:etc_t:s0", "size": 78, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/power", "secontext": "system_u:object_r:etc_t:s0", "size": 30, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf", "secontext": "system_u:object_r:etc_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/filesys", "secontext": "system_u:object_r:etc_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14 Tuesday 19 November 2024 14:37:17 -0500 (0:00:01.367) 0:00:13.612 ****** ok: [managed-node1] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/network", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 78, 
"state": "directory", "uid": 0 } ok: [managed-node1] => (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/power", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 30, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/zeroconf", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/filesys", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24 Tuesday 19 November 2024 14:37:18 -0500 (0:00:01.378) 0:00:14.991 ****** ok: [managed-node1] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": false, "checksum": "608d8a6ac6ee33bb86b77d28ba24fbcd378db43d", "dest": "/etc/pcp/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcplistenoverflows", "secontext": "system_u:object_r:etc_t:s0", "size": 971, "state": "file", "uid": 0 } ok: [managed-node1] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": false, "checksum": "3256a5c2e8d07a20d8e97a08c0ab163252b0beae", "dest": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "secontext": "system_u:object_r:etc_t:s0", "size": 1131, "state": "file", "uid": 0 } ok: [managed-node1] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": false, "checksum": "37b2bd7f2430bd9678ab078c5e69a53bea556524", "dest": "/etc/pcp/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldrops", "secontext": "system_u:object_r:etc_t:s0", "size": 1129, "state": "file", "uid": 0 } ok: [managed-node1] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": false, "checksum": "1d53d6182709617c8f633339652d8d9e75f3b603", "dest": "/etc/pcp/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/power/thermal_throttle", "secontext": "system_u:object_r:etc_t:s0", "size": 1153, "state": "file", "uid": 0 } ok: [managed-node1] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": false, "checksum": "65169db16dcaa224c211373001adc3addf1031c4", "dest": "/etc/pcp/pmieconf/zeroconf/all_threads", "gid": 0, "group": "root", "item": "zeroconf/all_threads", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf/all_threads", "secontext": "system_u:object_r:etc_t:s0", "size": 840, "state": "file", "uid": 0 } ok: [managed-node1] => 
(item=filesys/vfs_files) => { "ansible_loop_var": "item", "changed": false, "checksum": "cd5d85dfb8eebd7d9737d56e78bd969dafa3999c", "dest": "/etc/pcp/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/filesys/vfs_files", "secontext": "system_u:object_r:etc_t:s0", "size": 969, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance rule actions are installed for targeted hosts] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:34 Tuesday 19 November 2024 14:37:22 -0500 (0:00:03.557) 0:00:18.548 ****** ok: [managed-node1] => { "ansible_facts": { "local_pmie": "default" }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global pmie webhook action is configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:38 Tuesday 19 November 2024 14:37:22 -0500 (0:00:00.021) 0:00:18.570 ****** skipping: [managed-node1] => (item=default) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook action] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:50 Tuesday 19 November 2024 14:37:22 -0500 (0:00:00.025) 0:00:18.595 ****** skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'pcp_pmie_endpoint | length > 0', 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global webhook endpoint is configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:63 Tuesday 19 November 2024 14:37:22 -0500 (0:00:00.031) 0:00:18.627 ****** ok: [managed-node1] => (item=default) => { "ansible_loop_var": "item", "backup": "", "changed": false, "found": 0, "item": "default" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook endpoint] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:73 Tuesday 19 November 2024 14:37:22 -0500 (0:00:00.440) 0:00:19.067 ****** skipping: [managed-node1] => (item={'changed': False, 'found': 0, 'msg': '', 'backup': '', 'diff': [{'before': '', 'after': '', 'before_header': '/var/lib/pcp/config/pmie/config.default (content)', 'after_header': '/var/lib/pcp/config/pmie/config.default (content)'}, {'before_header': '/var/lib/pcp/config/pmie/config.default (file attributes)', 'after_header': '/var/lib/pcp/config/pmie/config.default (file attributes)'}], 
'invocation': {'module_args': {'state': 'absent', 'path': '/var/lib/pcp/config/pmie/config.default', 'regexp': '//.*global webhook_endpoint = ""', 'backrefs': False, 'create': False, 'backup': False, 'firstmatch': False, 'unsafe_writes': False, 'search_string': None, 'line': None, 'insertafter': None, 'insertbefore': None, 'validate': None, 'mode': None, 'owner': None, 'group': None, 'seuser': None, 'serole': None, 'selevel': None, 'setype': None, 'attributes': None}}, 'failed': False, 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "backup": "", "changed": false, "diff": [ { "after": "", "after_header": "/var/lib/pcp/config/pmie/config.default (content)", "before": "", "before_header": "/var/lib/pcp/config/pmie/config.default (content)" }, { "after_header": "/var/lib/pcp/config/pmie/config.default (file attributes)", "before_header": "/var/lib/pcp/config/pmie/config.default (file attributes)" } ], "failed": false, "found": 0, "invocation": { "module_args": { "attributes": null, "backrefs": false, "backup": false, "create": false, "firstmatch": false, "group": null, "insertafter": null, "insertbefore": null, "line": null, "mode": null, "owner": null, "path": "/var/lib/pcp/config/pmie/config.default", "regexp": "//.*global webhook_endpoint = \"\"", "search_string": null, "selevel": null, "serole": null, "setype": null, "seuser": null, "state": "absent", "unsafe_writes": false, "validate": null } }, "item": "default", "msg": "" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86 Tuesday 19 November 2024 14:37:22 -0500 (0:00:00.028) 0:00:19.095 ****** ok: [managed-node1] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 44, "src": "/etc/pcp/pmieconf/network/tcplistenoverflows", "state": "link", "uid": 0 } ok: [managed-node1] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 43, "src": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "state": "link", "uid": 0 } ok: [managed-node1] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 39, "src": "/etc/pcp/pmieconf/network/tcpqfulldrops", "state": "link", "uid": 0 } ok: [managed-node1] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0777", "owner": 
"root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 40, "src": "/etc/pcp/pmieconf/power/thermal_throttle", "state": "link", "uid": 0 } ok: [managed-node1] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/zeroconf/all_threads", "gid": 0, "group": "root", "item": "zeroconf/all_threads", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 38, "src": "/etc/pcp/pmieconf/zeroconf/all_threads", "state": "link", "uid": 0 } ok: [managed-node1] => (item=filesys/vfs_files) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 35, "src": "/etc/pcp/pmieconf/filesys/vfs_files", "state": "link", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (with control.d)] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:95 Tuesday 19 November 2024 14:37:24 -0500 (0:00:01.974) 0:00:21.070 ****** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (single control)] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:106 Tuesday 19 November 2024 14:37:24 -0500 (0:00:00.023) 0:00:21.094 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmie restart if needed] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:116 Tuesday 19 November 2024 14:37:24 -0500 (0:00:00.022) 0:00:21.117 ****** ok: [managed-node1] => { "ansible_facts": { "__pcp_restart_pmie": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is running and enabled on boot] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:120 Tuesday 19 November 2024 14:37:24 -0500 (0:00:00.032) 0:00:21.149 ****** ok: [managed-node1] => { "changed": false, "enabled": true, "name": "pmie", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2024-11-19 14:36:19 EST", "ActiveEnterTimestampMonotonic": "388441198", "ActiveExitTimestamp": "Tue 2024-11-19 14:36:18 EST", "ActiveExitTimestampMonotonic": "387995561", "ActiveState": "active", "After": "pcp-reboot-init.service basic.target system.slice systemd-journald.socket sysinit.target pmcd.service network-online.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:36:19 EST", "AssertTimestampMonotonic": "388229892", "Before": "shutdown.target multi-user.target pmie_daily.timer pmie_check.timer pmie_farm.service", "BindsTo": "pmie_daily.timer pmie_check.timer pmie_farm.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", 
"CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "286897000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:36:19 EST", "ConditionTimestampMonotonic": "388229888", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlGroupId": "4562", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "\"man:pmie(1)\"", "DynamicUser": "no", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "22035", "ExecMainStartTimestamp": "Tue 2024-11-19 14:36:19 EST", "ExecMainStartTimestampMonotonic": "388441166", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:36:19 EST] ; stop_time=[n/a] ; pid=21854 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; flags= ; start_time=[Tue 2024-11-19 14:36:19 EST] ; stop_time=[n/a] ; pid=21854 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "993", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:36:19 EST", "InactiveEnterTimestampMonotonic": "388229415", "InactiveExitTimestamp": "Tue 
2024-11-19 14:36:19 EST", "InactiveExitTimestampMonotonic": "388234371", "InvocationID": "b11284b554a74775953fe7f3cb77b101", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "22035", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "1671168", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmie.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:36:19 EST", "StateChangeTimestampMonotonic": "388441198", "StateDirectoryMode": "0755", 
"StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22329", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is restarted and enabled on boot] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:127 Tuesday 19 November 2024 14:37:25 -0500 (0:00:00.488) 0:00:21.638 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__pcp_restart_pmie | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmlogger] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:48 Tuesday 19 November 2024 14:37:25 -0500 (0:00:00.022) 0:00:21.661 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml for managed-node1 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure metric log location is configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:4 Tuesday 19 November 2024 14:37:25 -0500 (0:00:00.051) 0:00:21.712 ****** ok: [managed-node1] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is configured] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:12 Tuesday 19 November 2024 14:37:25 -0500 (0:00:00.372) 0:00:22.085 ****** ok: [managed-node1] => { "changed": false, "checksum": "67bc35973101c614e92b1990f8bebfffc39fe498", "dest": "/etc/sysconfig/pmlogger", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger", "secontext": "system_u:object_r:etc_t:s0", "size": 1180, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging retention period is set] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:19 Tuesday 19 November 2024 14:37:26 -0500 (0:00:00.608) 0:00:22.693 ****** ok: [managed-node1] => { "changed": false, "checksum": "df7bd3b5b6f1de3af164aab81441c7251a13a298", "dest": "/etc/sysconfig/pmlogger_timers", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger_timers", "secontext": "system_u:object_r:etc_t:s0", "size": 988, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance 
metric logging for targeted hosts (with control.d)] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:27 Tuesday 19 November 2024 14:37:26 -0500 (0:00:00.633) 0:00:23.327 ****** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric logging for targeted hosts (single control)] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:39 Tuesday 19 November 2024 14:37:26 -0500 (0:00:00.027) 0:00:23.355 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmlogger restart if needed] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:49 Tuesday 19 November 2024 14:37:26 -0500 (0:00:00.028) 0:00:23.383 ****** ok: [managed-node1] => { "ansible_facts": { "__pcp_restart_pmlogger": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is running and enabled on boot] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:53 Tuesday 19 November 2024 14:37:26 -0500 (0:00:00.028) 0:00:23.411 ****** ok: [managed-node1] => { "changed": false, "enabled": true, "name": "pmlogger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2024-11-19 14:36:57 EST", "ActiveEnterTimestampMonotonic": "426740396", "ActiveExitTimestamp": "Tue 2024-11-19 14:36:57 EST", "ActiveExitTimestampMonotonic": "426020276", "ActiveState": "active", "After": "network-online.target systemd-journald.socket basic.target pmcd.service pcp-reboot-init.service sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:36:57 EST", "AssertTimestampMonotonic": "426092990", "Before": "pmlogger_farm.service pmlogger_daily.timer multi-user.target pmlogger_check.timer shutdown.target", "BindsTo": "pmlogger_farm.service pmlogger_daily.timer pmlogger_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "1161790000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf 
cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:36:57 EST", "ConditionTimestampMonotonic": "426092987", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlGroupId": "4858", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "\"man:pmlogger(1)\"", "DynamicUser": "no", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "32157", "ExecMainStartTimestamp": "Tue 2024-11-19 14:36:57 EST", "ExecMainStartTimestampMonotonic": "426740358", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:36:57 EST] ; stop_time=[n/a] ; pid=31933 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; flags= ; start_time=[Tue 2024-11-19 14:36:57 EST] ; stop_time=[n/a] ; pid=31933 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "993", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:36:57 EST", "InactiveEnterTimestampMonotonic": "426092264", "InactiveExitTimestamp": "Tue 2024-11-19 14:36:57 EST", "InactiveExitTimestampMonotonic": "426100413", "InvocationID": "c0a14659ecdc4a2eb8b3aac4091aa759", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": 
"infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "32157", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "3092480", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmlogger.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:36:57 EST", "StateChangeTimestampMonotonic": "426740396", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22329", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK 
[fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is restarted and enabled on boot] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:60 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.488) 0:00:23.900 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__pcp_restart_pmlogger | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmproxy] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:51 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.025) 0:00:23.925 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "pcp_rest_api | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric graphing service.] ****************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:94 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.032) 0:00:23.958 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_graph_service | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Configure firewall] ****************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:102 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.032) 0:00:23.990 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml for managed-node1 TASK [fedora.linux_system_roles.metrics : Initialize __metrics_firewall] ******* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:9 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.041) 0:00:24.032 ****** ok: [managed-node1] => { "ansible_facts": { "__metrics_firewall": [] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Port for pmcd] *********************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:13 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.029) 0:00:24.061 ****** ok: [managed-node1] => { "ansible_facts": { "__metrics_firewall": [ { "port": "44321/tcp", "state": "enabled" } ] }, "changed": false } TASK [fedora.linux_system_roles.metrics : Port for pmproxy used by query and grafana] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:19 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.039) 0:00:24.101 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_graph_service | bool or metrics_query_service | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for grafana] ***************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:25 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.028) 0:00:24.130 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_graph_service|bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for valkey] ****************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:31 
Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.025) 0:00:24.156 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_query_service | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for redis] ******************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:38 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.025) 0:00:24.182 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_query_service | bool", "skip_reason": "Conditional result was False" } TASK [Ensure the service and the port status with the firewall role] *********** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:45 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.039) 0:00:24.222 ****** included: fedora.linux_system_roles.firewall for managed-node1 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Tuesday 19 November 2024 14:37:27 -0500 (0:00:00.198) 0:00:24.420 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Tuesday 19 November 2024 14:37:28 -0500 (0:00:00.076) 0:00:24.497 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Tuesday 19 November 2024 14:37:28 -0500 (0:00:00.059) 0:00:24.557 ****** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Tuesday 19 November 2024 14:37:28 -0500 (0:00:00.430) 0:00:24.987 ****** ok: [managed-node1] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Tuesday 19 November 2024 14:37:28 -0500 (0:00:00.033) 0:00:25.020 ****** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Tuesday 19 November 2024 14:37:28 -0500 (0:00:00.391) 0:00:25.412 ****** ok: [managed-node1] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Tuesday 19 November 2024 14:37:28 -0500 (0:00:00.032) 0:00:25.444 ****** 
ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Tuesday 19 November 2024 14:37:30 -0500 (0:00:01.264) 0:00:26.709 ****** skipping: [managed-node1] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Tuesday 19 November 2024 14:37:30 -0500 (0:00:00.032) 0:00:26.742 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Tuesday 19 November 2024 14:37:30 -0500 (0:00:00.028) 0:00:26.770 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Tuesday 19 November 2024 14:37:30 -0500 (0:00:00.033) 0:00:26.804 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Tuesday 19 November 2024 14:37:30 -0500 (0:00:00.027) 0:00:26.831 ****** skipping: [managed-node1] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Tuesday 19 November 2024 14:37:30 -0500 (0:00:00.055) 0:00:26.887 ****** ok: [managed-node1] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target dbus.socket dbus-broker.service basic.target polkit.service system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BlockIOAccounting": 
"no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service ebtables.service iptables.service shutdown.target ipset.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", 
"LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22329", "TimeoutAbortUSec": "1min 
30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Tuesday 19 November 2024 14:37:30 -0500 (0:00:00.524) 0:00:27.412 ****** changed: [managed-node1] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "polkit.service basic.target system.slice sysinit.target dbus.socket dbus-broker.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service ebtables.service ipset.service iptables.service shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork 
--nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": 
"no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22329", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Tuesday 19 November 2024 14:37:31 -0500 (0:00:00.977) 0:00:28.389 ****** ok: [managed-node1] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.9", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Tuesday 19 November 2024 14:37:31 -0500 (0:00:00.038) 0:00:28.427 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Tuesday 19 November 2024 14:37:31 -0500 (0:00:00.031) 0:00:28.458 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.038) 0:00:28.497 ****** changed: [managed-node1] => (item={'port': '44321/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "44321/tcp", "state": "enabled" } 
} TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.760) 0:00:29.257 ****** skipping: [managed-node1] => (item={'port': '44321/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "port": "44321/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.056) 0:00:29.314 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.040) 0:00:29.354 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.033) 0:00:29.388 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.029) 0:00:29.417 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.027) 0:00:29.444 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Tuesday 19 November 2024 14:37:32 -0500 (0:00:00.027) 0:00:29.472 ****** skipping: [managed-node1] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [fedora.linux_system_roles.metrics : Configure selinux] ******************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:105 Tuesday 19 November 2024 14:37:33 -0500 (0:00:00.031) 0:00:29.503 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml for managed-node1 TASK [fedora.linux_system_roles.metrics : Set pcp_bind_all_unreserved_ports] *** 
task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:6 Tuesday 19 November 2024 14:37:33 -0500 (0:00:00.036) 0:00:29.540 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [Ensure the port status with the selinux role] **************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:11 Tuesday 19 November 2024 14:37:33 -0500 (0:00:00.018) 0:00:29.558 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [Restart PMCD] ************************************************************ task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:35 Tuesday 19 November 2024 14:37:33 -0500 (0:00:00.031) 0:00:29.589 ****** ok: [managed-node1] => { "changed": false, "cmd": "systemctl restart pmcd && sleep 5", "delta": "0:00:06.083855", "end": "2024-11-19 14:37:39.480432", "rc": 0, "start": "2024-11-19 14:37:33.396577" } TASK [Check if SASL works] ***************************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:40 Tuesday 19 November 2024 14:37:39 -0500 (0:00:06.420) 0:00:36.009 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_sasl.yml for managed-node1 => (item=check_sasl.yml) included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml for managed-node1 => (item=check_firewall_selinux.yml) TASK [Check if user exists] **************************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_sasl.yml:3 Tuesday 19 November 2024 14:37:39 -0500 (0:00:00.055) 0:00:36.065 ****** ok: [managed-node1] => { "changed": false, "cmd": [ "id", "pcptest" ], "delta": "0:00:00.005675", "end": "2024-11-19 14:37:39.879700", "rc": 0, "start": "2024-11-19 14:37:39.874025" } STDOUT: uid=991(pcptest) gid=991(pcptest) groups=991(pcptest) TASK [Check if password is configured] ***************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_sasl.yml:7 Tuesday 19 November 2024 14:37:39 -0500 (0:00:00.349) 0:00:36.414 ****** ok: [managed-node1] => { "changed": false, "cmd": "set -euo pipefail\nsasldblistusers2 -f /etc/pcp/passwd.db | grep -wq \"pcptest\"\n", "delta": "0:00:00.007036", "end": "2024-11-19 14:37:40.247364", "rc": 0, "start": "2024-11-19 14:37:40.240328" } TASK [Check if a client can access metrics] ************************************ task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_sasl.yml:14 Tuesday 19 November 2024 14:37:40 -0500 (0:00:00.384) 0:00:36.799 ****** ok: [managed-node1] => { "changed": false, "cmd": [ "pminfo", "-f", "-h", "pcp://127.0.0.1?username=pcptest&password=t;dlen;dle", "disk.dev.read" ], "delta": "0:00:00.030027", "end": "2024-11-19 14:37:40.767230", "rc": 0, "start": "2024-11-19 14:37:40.737203" } STDOUT: disk.dev.read inst [0 or "xvda"] value 12787 TASK [Check firewall service status for grafana; metrics_manage_firewall is true] *** task path: 
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:11 Tuesday 19 November 2024 14:37:40 -0500 (0:00:00.505) 0:00:37.305 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_graph_service|d(false)|bool", "skip_reason": "Conditional result was False" } TASK [Check firewall service status for redis; metrics_manage_firewall is true] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:34 Tuesday 19 November 2024 14:37:40 -0500 (0:00:00.047) 0:00:37.352 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_query_service|d(false)|bool", "skip_reason": "Conditional result was False" } TASK [Check firewall port status for pmproxy; metrics_manage_firewall is true] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:45 Tuesday 19 November 2024 14:37:40 -0500 (0:00:00.034) 0:00:37.387 ****** skipping: [managed-node1] => { "changed": false, "false_condition": "metrics_graph_service|d(false)|bool or metrics_query_service|d(false)|bool", "skip_reason": "Conditional result was False" } TASK [Check firewall port status for pmcd; metrics_manage_firewall is true] **** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:54 Tuesday 19 November 2024 14:37:40 -0500 (0:00:00.036) 0:00:37.424 ****** ok: [managed-node1] => { "changed": false, "cmd": [ "firewall-cmd", "--list-ports" ], "delta": "0:00:00.206161", "end": "2024-11-19 14:37:41.517460", "failed_when_result": false, "rc": 0, "start": "2024-11-19 14:37:41.311299" } STDOUT: 44321/tcp TASK [Check firewall port status for keyserver; metrics_manage_firewall is true] *** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:60 Tuesday 19 November 2024 14:37:41 -0500 (0:00:00.627) 0:00:38.051 ****** fatal: [managed-node1]: FAILED! => { "changed": false, "cmd": [ "firewall-cmd", "--list-ports" ], "delta": "0:00:00.203652", "end": "2024-11-19 14:37:42.082574", "failed_when_result": true, "rc": 0, "start": "2024-11-19 14:37:41.878922" } STDOUT: 44321/tcp TASK [Handle failure case] ***************************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:50 Tuesday 19 November 2024 14:37:42 -0500 (0:00:00.563) 0:00:38.615 ****** included: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml for managed-node1 TASK [Collect logs] ************************************************************ task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2 Tuesday 19 November 2024 14:37:42 -0500 (0:00:00.041) 0:00:38.656 ****** ok: [managed-node1] => { "changed": false, "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n", "delta": "0:00:00.047085", "end": "2024-11-19 14:37:42.511122", "rc": 0, "start": "2024-11-19 14:37:42.464037" } STDOUT: Nov 19 14:30:04 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@efi_pstore.service has successfully entered the 'dead' state. Nov 19 14:30:04 localhost systemd[1]: Finished Load Kernel Module efi_pstore. ░░ Subject: A start job for unit modprobe@efi_pstore.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@efi_pstore.service has finished successfully. ░░ ░░ The job identifier is 166. Nov 19 14:30:04 localhost systemd[1]: Mounting Kernel Configuration File System... ░░ Subject: A start job for unit sys-kernel-config.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has begun execution. ░░ ░░ The job identifier is 143. Nov 19 14:30:04 localhost systemd[1]: Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore). ░░ Subject: A start job for unit systemd-pstore.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pstore.service has finished successfully. ░░ ░░ The job identifier is 165. Nov 19 14:30:04 localhost systemd[1]: Mounted Kernel Configuration File System. ░░ Subject: A start job for unit sys-kernel-config.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has finished successfully. ░░ ░░ The job identifier is 143. Nov 19 14:30:04 localhost kernel: fuse: init (API version 7.36) Nov 19 14:30:04 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@fuse.service has successfully entered the 'dead' state. Nov 19 14:30:04 localhost systemd[1]: Finished Load Kernel Module fuse. ░░ Subject: A start job for unit modprobe@fuse.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@fuse.service has finished successfully. ░░ ░░ The job identifier is 132. Nov 19 14:30:04 localhost systemd[1]: Mounting FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 131. Nov 19 14:30:04 localhost systemd[1]: Mounted FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 131. Nov 19 14:30:04 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Nov 19 14:30:04 localhost kernel: ACPI: bus type drm_connector registered Nov 19 14:30:04 localhost systemd[1]: Finished Load Kernel Module drm. 
░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 217. Nov 19 14:30:04 localhost systemd[1]: Finished Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 136. Nov 19 14:30:04 localhost systemd[1]: Reached target Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 128. Nov 19 14:30:04 localhost systemd[1]: Reached target Local File Systems. ░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 126. Nov 19 14:30:04 localhost systemd[1]: Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 150. Nov 19 14:30:04 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 130. Nov 19 14:30:04 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 175. Nov 19 14:30:04 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 119. Nov 19 14:30:04 localhost systemd[1]: Starting Automatic Boot Loader Update... ░░ Subject: A start job for unit systemd-boot-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has begun execution. ░░ ░░ The job identifier is 161. Nov 19 14:30:04 localhost systemd[1]: Starting Commit a transient machine-id on disk... 
░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 156. Nov 19 14:30:04 localhost systemd[1]: Starting Create Volatile Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 145. Nov 19 14:30:04 localhost bootctl[492]: Couldn't find EFI system partition, skipping. Nov 19 14:30:04 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 152. Nov 19 14:30:04 localhost systemd[1]: Finished Automatic Boot Loader Update. ░░ Subject: A start job for unit systemd-boot-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has finished successfully. ░░ ░░ The job identifier is 161. Nov 19 14:30:04 localhost systemd[1]: Finished Commit a transient machine-id on disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 156. Nov 19 14:30:04 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Nov 19 14:30:04 localhost systemd-udevd[496]: Using default interface naming scheme 'rhel-9.0'. Nov 19 14:30:04 localhost systemd[1]: Started Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 152. Nov 19 14:30:04 localhost systemd[1]: Starting Load Kernel Module configfs... ░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 264. Nov 19 14:30:04 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Nov 19 14:30:04 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 264. Nov 19 14:30:04 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped. 
░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 233. Nov 19 14:30:04 localhost systemd[1]: Finished Create Volatile Files and Directories. ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 145. Nov 19 14:30:04 localhost systemd[1]: Mounting RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 225. Nov 19 14:30:04 localhost systemd[1]: Starting Security Auditing Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 215. Nov 19 14:30:04 localhost systemd[1]: Starting RPC Bind... ░░ Subject: A start job for unit rpcbind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has begun execution. ░░ ░░ The job identifier is 206. Nov 19 14:30:04 localhost systemd[1]: Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 125. Nov 19 14:30:04 localhost systemd[1]: Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 159. Nov 19 14:30:05 localhost auditd[530]: No plugins found, not dispatching events Nov 19 14:30:05 localhost auditd[530]: Init complete, auditd 3.1.5 listening for events (startup state enable) Nov 19 14:30:05 localhost systemd[1]: Started RPC Bind. ░░ Subject: A start job for unit rpcbind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has finished successfully. ░░ ░░ The job identifier is 206. Nov 19 14:30:05 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Nov 19 14:30:05 localhost systemd-udevd[497]: Network interface NamePolicy= disabled on kernel command line. Nov 19 14:30:05 localhost kernel: RPC: Registered named UNIX socket transport module. Nov 19 14:30:05 localhost kernel: RPC: Registered udp transport module. Nov 19 14:30:05 localhost kernel: RPC: Registered tcp transport module. Nov 19 14:30:05 localhost kernel: RPC: Registered tcp-with-tls transport module. Nov 19 14:30:05 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Nov 19 14:30:05 localhost systemd[1]: Mounted RPC Pipe File System. 
░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 225. Nov 19 14:30:05 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 224. Nov 19 14:30:05 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Nov 19 14:30:05 localhost augenrules[533]: /sbin/augenrules: No change Nov 19 14:30:05 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Nov 19 14:30:05 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Nov 19 14:30:05 localhost kernel: Console: switching to colour dummy device 80x25 Nov 19 14:30:05 localhost kernel: [drm] Initialized cirrus 2.0.0 2019 for 0000:00:02.0 on minor 0 Nov 19 14:30:05 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Nov 19 14:30:05 localhost kernel: Console: switching to colour frame buffer device 128x48 Nov 19 14:30:05 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Nov 19 14:30:05 localhost augenrules[561]: No rules Nov 19 14:30:05 localhost augenrules[561]: enabled 1 Nov 19 14:30:05 localhost augenrules[561]: failure 1 Nov 19 14:30:05 localhost augenrules[561]: pid 530 Nov 19 14:30:05 localhost augenrules[561]: rate_limit 0 Nov 19 14:30:05 localhost augenrules[561]: backlog_limit 8192 Nov 19 14:30:05 localhost augenrules[561]: lost 0 Nov 19 14:30:05 localhost augenrules[561]: backlog 4 Nov 19 14:30:05 localhost augenrules[561]: backlog_wait_time 60000 Nov 19 14:30:05 localhost augenrules[561]: backlog_wait_time_actual 0 Nov 19 14:30:05 localhost augenrules[561]: enabled 1 Nov 19 14:30:05 localhost augenrules[561]: failure 1 Nov 19 14:30:05 localhost augenrules[561]: pid 530 Nov 19 14:30:05 localhost augenrules[561]: rate_limit 0 Nov 19 14:30:05 localhost augenrules[561]: backlog_limit 8192 Nov 19 14:30:05 localhost augenrules[561]: lost 0 Nov 19 14:30:05 localhost augenrules[561]: backlog 4 Nov 19 14:30:05 localhost augenrules[561]: backlog_wait_time 60000 Nov 19 14:30:05 localhost augenrules[561]: backlog_wait_time_actual 0 Nov 19 14:30:05 localhost augenrules[561]: enabled 1 Nov 19 14:30:05 localhost augenrules[561]: failure 1 Nov 19 14:30:05 localhost augenrules[561]: pid 530 Nov 19 14:30:05 localhost augenrules[561]: rate_limit 0 Nov 19 14:30:05 localhost augenrules[561]: backlog_limit 8192 Nov 19 14:30:05 localhost augenrules[561]: lost 0 Nov 19 14:30:05 localhost augenrules[561]: backlog 4 Nov 19 14:30:05 localhost augenrules[561]: backlog_wait_time 60000 Nov 19 14:30:05 localhost augenrules[561]: backlog_wait_time_actual 0 Nov 19 14:30:05 localhost systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 215. Nov 19 14:30:05 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP... 
░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 214. Nov 19 14:30:05 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 214. Nov 19 14:30:05 localhost systemd[1]: Reached target System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 118. Nov 19 14:30:05 localhost systemd[1]: Started dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 186. Nov 19 14:30:05 localhost systemd[1]: Started Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 193. Nov 19 14:30:05 localhost systemd[1]: Started Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 194. Nov 19 14:30:05 localhost systemd[1]: Reached target Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 185. Nov 19 14:30:05 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 190. Nov 19 14:30:05 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 198. Nov 19 14:30:05 localhost systemd[1]: Reached target Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 195. Nov 19 14:30:05 localhost systemd[1]: Starting D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 191. 
Nov 19 14:30:05 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 172. Nov 19 14:30:06 localhost systemd[1]: Started D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 191. Nov 19 14:30:06 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 117. Nov 19 14:30:06 localhost dbus-broker-lau[572]: Ready Nov 19 14:30:06 localhost systemd[1]: Starting NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 203. Nov 19 14:30:06 localhost systemd[1]: Starting Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 243. Nov 19 14:30:06 localhost systemd[1]: Starting Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 178. Nov 19 14:30:06 localhost systemd[1]: Started irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 202. Nov 19 14:30:06 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload). ░░ Subject: A start job for unit microcode.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit microcode.service has finished successfully. ░░ ░░ The job identifier is 183. Nov 19 14:30:06 localhost systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 219. Nov 19 14:30:06 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). 
░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 209. Nov 19 14:30:06 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 211. Nov 19 14:30:06 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 212. Nov 19 14:30:06 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 208. Nov 19 14:30:06 localhost /usr/sbin/irqbalance[582]: libcap-ng used by "/usr/sbin/irqbalance" failed dropping bounding set due to not having CAP_SETPCAP in capng_apply Nov 19 14:30:06 localhost rngd[583]: Disabling 7: PKCS11 Entropy generator (pkcs11) Nov 19 14:30:06 localhost rngd[583]: Disabling 5: NIST Network Entropy Beacon (nist) Nov 19 14:30:06 localhost rngd[583]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Nov 19 14:30:06 localhost rngd[583]: Initializing available sources Nov 19 14:30:06 localhost rngd[583]: [hwrng ]: Initialization Failed Nov 19 14:30:06 localhost rngd[583]: [rdrand]: Enabling RDRAND rng support Nov 19 14:30:06 localhost rngd[583]: [rdrand]: Initialized Nov 19 14:30:06 localhost rngd[583]: [jitter]: JITTER timeout set to 5 sec Nov 19 14:30:06 localhost rngd[583]: [jitter]: Initializing AES buffer Nov 19 14:30:06 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 239. Nov 19 14:30:06 localhost systemd[1]: Reached target User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 240. Nov 19 14:30:06 localhost systemd[1]: Starting User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 216. Nov 19 14:30:06 localhost systemd[1]: Starting Rotate log files... 
░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 269. Nov 19 14:30:06 localhost systemd[1]: Finished Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 178. Nov 19 14:30:06 localhost systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Nov 19 14:30:06 localhost systemd[1]: Finished Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 269. Nov 19 14:30:06 localhost systemd-logind[588]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. Nov 19 14:30:06 localhost systemd-logind[588]: Watching system buttons on /dev/input/event0 (Power Button) Nov 19 14:30:06 localhost systemd-logind[588]: Watching system buttons on /dev/input/event1 (Sleep Button) Nov 19 14:30:06 localhost systemd-logind[588]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Nov 19 14:30:06 localhost systemd[1]: Started User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 216. Nov 19 14:30:06 localhost chronyd[593]: chronyd version 4.6 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Nov 19 14:30:06 localhost chronyd[593]: Loaded 0 symmetric keys Nov 19 14:30:06 localhost chronyd[593]: Using right/UTC timezone to obtain leap second data Nov 19 14:30:06 localhost chronyd[593]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Nov 19 14:30:06 localhost chronyd[593]: Loaded seccomp filter (level 2) Nov 19 14:30:06 localhost systemd[1]: Started NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 203. Nov 19 14:30:10 localhost cloud-init[597]: Cloud-init v. 23.4-19.el9 running 'init-local' at Tue, 19 Nov 2024 19:30:10 +0000. Up 19.21 seconds. Nov 19 14:30:10 localhost dhclient[600]: Internet Systems Consortium DHCP Client 4.4.2b1 Nov 19 14:30:10 localhost dhclient[600]: Copyright 2004-2019 Internet Systems Consortium. Nov 19 14:30:10 localhost dhclient[600]: All rights reserved. 
Nov 19 14:30:10 localhost dhclient[600]: For info, please visit https://www.isc.org/software/dhcp/ Nov 19 14:30:10 localhost dhclient[600]: Nov 19 14:30:10 localhost dhclient[600]: Listening on LPF/eth0/12:d8:6e:9b:2b:97 Nov 19 14:30:10 localhost dhclient[600]: Sending on LPF/eth0/12:d8:6e:9b:2b:97 Nov 19 14:30:10 localhost dhclient[600]: Sending on Socket/fallback Nov 19 14:30:10 localhost dhclient[600]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 5 (xid=0x87276077) Nov 19 14:30:10 localhost dhclient[600]: DHCPOFFER of 10.31.9.203 from 10.31.8.1 Nov 19 14:30:10 localhost dhclient[600]: DHCPREQUEST for 10.31.9.203 on eth0 to 255.255.255.255 port 67 (xid=0x87276077) Nov 19 14:30:10 localhost dhclient[600]: DHCPACK of 10.31.9.203 from 10.31.8.1 (xid=0x87276077) Nov 19 14:30:10 localhost dhclient[600]: bound to 10.31.9.203 -- renewal in 1487 seconds. Nov 19 14:30:11 localhost rngd[583]: [jitter]: Unable to obtain AES key, disabling JITTER source Nov 19 14:30:11 localhost rngd[583]: [jitter]: Initialization Failed Nov 19 14:30:11 localhost rngd[583]: [namedpipe]: Initialization Failed Nov 19 14:30:11 localhost rngd[583]: Process privileges have been dropped to 2:2 Nov 19 14:30:11 localhost systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 334. Nov 19 14:30:11 localhost systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 334. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-hostnamed[615]: Hostname set to (static) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (pre-networking). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 243. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 177. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 189. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.3375] NetworkManager (version 1.51.2-2.el9) is starting... 
(boot:87761b0d-41cd-44d7-8e06-bba89f30da59) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.3378] Read config: /etc/NetworkManager/NetworkManager.conf (run: 15-carrier-timeout.conf) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.3501] manager[0x557fe4f57080]: monitoring kernel firmware directory '/lib/firmware'. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.3542] hostname: hostname: using hostnamed Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.3543] hostname: static hostname changed from (none) to "ip-10-31-9-203.us-east-1.aws.redhat.com" Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.3549] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.4554] manager[0x557fe4f57080]: rfkill: Wi-Fi hardware radio set enabled Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.4554] manager[0x557fe4f57080]: rfkill: WWAN hardware radio set enabled Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.4688] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.51.2-2.el9/libnm-device-plugin-team.so) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.4690] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.4692] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.4694] manager: Networking is enabled by state file Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.4723] settings: Loaded settings plugin: keyfile (internal) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 467. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 401. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 401. 
Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.6207] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.51.2-2.el9/libnm-settings-plugin-ifcfg-rh.so") Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7900] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate" Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7922] dhcp: init: Using DHCP client 'internal' Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7925] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7940] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7955] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7961] device (lo): Activation: starting connection 'lo' (263786fb-e4f2-42de-ac3a-dd42f70ef562) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7967] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7970] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 189. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.7987] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 192. 
Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8015] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8018] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8020] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8036] device (eth0): carrier: link connected Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8039] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8045] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8063] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8068] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8070] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8073] manager: NetworkManager state is now CONNECTING Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8075] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8081] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8085] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online... ░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has begun execution. ░░ ░░ The job identifier is 188. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon... ░░ Subject: A start job for unit gssproxy.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has begun execution. ░░ ░░ The job identifier is 227. 
Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8194] dhcp4 (eth0): state changed new lease, address=10.31.9.203 Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8201] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8286] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8320] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8327] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8331] device (lo): Activation: successful, device activated. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8382] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8384] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full') Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8387] manager: NetworkManager state is now CONNECTED_SITE Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8389] device (eth0): Activation: successful, device activated. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8395] manager: NetworkManager state is now CONNECTED_GLOBAL Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com NetworkManager[619]: [1732044611.8479] manager: startup complete Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Network Manager Wait Online. ░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-wait-online.service has finished successfully. ░░ ░░ The job identifier is 188. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Initial cloud-init job (metadata service crawler)... ░░ Subject: A start job for unit cloud-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has begun execution. ░░ ░░ The job identifier is 242. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Added source 10.11.160.238 Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Added source 10.18.100.10 Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Added source 10.2.32.37 Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Added source 10.2.32.38 Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon. ░░ Subject: A start job for unit gssproxy.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit gssproxy.service has finished successfully. ░░ ░░ The job identifier is 227. 
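[Editor's illustrative aside, not part of the captured journal] The entries above show eth0 obtaining 10.31.9.203 from the internal DHCP client and NetworkManager reaching CONNECTED_GLOBAL, which is what allows NetworkManager-wait-online.service to finish. A hedged sketch of how the same state could be spot-checked from a playbook, assuming nmcli is available on the managed node:

- name: Query NetworkManager device states (illustrative spot check)
  ansible.builtin.command: nmcli -t -f DEVICE,STATE,CONNECTION device status
  register: nm_device_status
  changed_when: false

- name: Show the device states reported by nmcli
  ansible.builtin.debug:
    var: nm_device_status.stdout_lines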
Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). ░░ Subject: A start job for unit rpc-gssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-gssd.service has finished successfully. ░░ ░░ The job identifier is 223. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services. ░░ Subject: A start job for unit nfs-client.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nfs-client.target has finished successfully. ░░ ░░ The job identifier is 221. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Remote File Systems. ░░ Subject: A start job for unit remote-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 229. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. ░░ ░░ The job identifier is 220. Nov 19 14:30:11 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). ░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase.service has finished successfully. ░░ ░░ The job identifier is 134. Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Cloud-init v. 23.4-19.el9 running 'init' at Tue, 19 Nov 2024 19:30:12 +0000. Up 21.12 seconds. Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | eth0 | True | 10.31.9.203 | 255.255.252.0 | global | 12:d8:6e:9b:2b:97 | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | eth0 | True | fe80::10d8:6eff:fe9b:2b97/64 | . | link | 12:d8:6e:9b:2b:97 | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | lo | True | ::1/128 | . | host | . 
| Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info++++++++++++++++++++++++++++ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +-------+-------------+-----------+---------------+-----------+-------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +-------+-------------+-----------+---------------+-----------+-------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | 0 | 0.0.0.0 | 10.31.8.1 | 0.0.0.0 | eth0 | UG | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | 1 | 10.31.8.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +-------+-------------+-----------+---------------+-----------+-------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +-------+-------------+---------+-----------+-------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | Route | Destination | Gateway | Interface | Flags | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +-------+-------------+---------+-----------+-------+ Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | 1 | fe80::/64 | :: | eth0 | U | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: | 3 | multicast | :: | eth0 | U | Nov 19 14:30:12 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: ci-info: +-------+-------------+---------+-----------+-------+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Generating public/private rsa key pair. Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key fingerprint is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: SHA256:n9aj+eDxXx8kydxigjnerKPbMi15BJOb9zq6dPy37RA root@ip-10-31-9-203.us-east-1.aws.redhat.com Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key's randomart image is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +---[RSA 3072]----+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | + o o o | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | S+ . E o | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | oo++oo = | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | .=+Boo. 
..| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | .=oBoB oo.o| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | =X+Bo+o+o.| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +----[SHA256]-----+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Generating public/private dsa key pair. Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your identification has been saved in /etc/ssh/ssh_host_dsa_key Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your public key has been saved in /etc/ssh/ssh_host_dsa_key.pub Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key fingerprint is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: SHA256:RHEoQZbLGCt1rx4TethceqBMoefbQwAsZa7p9NlmKXU root@ip-10-31-9-203.us-east-1.aws.redhat.com Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key's randomart image is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +---[DSA 1024]----+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | oo .+ooo. | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: |.oo +.+... | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | ..+ B +. | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | oo * =.o | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: |o. * O ES | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: |o . X % . | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . + @ + | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | = + | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +----[SHA256]-----+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Generating public/private ecdsa key pair. Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key fingerprint is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: SHA256:mTTURh/Z9AvAm+eM9BUxnK2Yl9Cmkmf61MPZAxqd34Y root@ip-10-31-9-203.us-east-1.aws.redhat.com Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key's randomart image is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +---[ECDSA 256]---+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | .oo..=oo+| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . oo+.+=o| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | o. .=B.oo| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . +o=*==..| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | S .=O++=.| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | .o.+Eo+| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | o o.| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . 
| Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +----[SHA256]-----+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Generating public/private ed25519 key pair. Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key fingerprint is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: SHA256:z+LLZzO2Ae7TwGZ5rOzFXe4ASdTk1ozJo20Px7Irp2g root@ip-10-31-9-203.us-east-1.aws.redhat.com Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: The key's randomart image is: Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +--[ED25519 256]--+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | .o. | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . o.= | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . B o | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | . = o | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | .So+ = + | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | .*=o+ O | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | +o=* + o | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | +E+O..+ | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: | oB*oB. . | Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[710]: +----[SHA256]-----+ Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (metadata service crawler). ░░ Subject: A start job for unit cloud-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.service has finished successfully. ░░ ░░ The job identifier is 242. Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability. ░░ Subject: A start job for unit cloud-config.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.target has finished successfully. ░░ ░░ The job identifier is 246. Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online. ░░ Subject: A start job for unit network-online.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-online.target has finished successfully. ░░ ░░ The job identifier is 187. Nov 19 14:30:13 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Apply the settings specified in cloud-config... ░░ Subject: A start job for unit cloud-config.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has begun execution. ░░ ░░ The job identifier is 245. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... 
░░ Subject: A start job for unit kdump.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has begun execution. ░░ ░░ The job identifier is 250. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... ░░ Subject: A start job for unit restraintd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has begun execution. ░░ ░░ The job identifier is 249. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... ░░ Subject: A start job for unit rpc-statd-notify.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has begun execution. ░░ ░░ The job identifier is 228. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... ░░ Subject: A start job for unit rsyslog.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has begun execution. ░░ ░░ The job identifier is 247. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com sm-notify[789]: Version 2.5.4 starting Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 207. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. ░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc-statd-notify.service has finished successfully. ░░ ░░ The job identifier is 228. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[791]: Server listening on 0.0.0.0 port 22. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[791]: Server listening on :: port 22. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 207. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com rsyslogd[790]: [origin software="rsyslogd" swVersion="8.2310.0-4.el9" x-pid="790" x-info="https://www.rsyslog.com"] start Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. ░░ Subject: A start job for unit rsyslog.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rsyslog.service has finished successfully. ░░ ░░ The job identifier is 247. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[806]: Cloud-init v. 23.4-19.el9 running 'modules:config' at Tue, 19 Nov 2024 19:30:14 +0000. Up 23.13 seconds. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. 
░░ Subject: A start job for unit restraintd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit restraintd.service has finished successfully. ░░ ░░ The job identifier is 249. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com rsyslogd[790]: imjournal: journal files changed, reloading... [v8.2310.0-4.el9 try https://www.rsyslog.com/e/0 ] Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Stopping OpenSSH server daemon... ░░ Subject: A stop job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 488. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[791]: Received signal 15; terminating. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit sshd.service has successfully entered the 'dead' state. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Stopped OpenSSH server daemon. ░░ Subject: A stop job for unit sshd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd.service has finished. ░░ ░░ The job identifier is 488 and the job result is done. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target. ░░ Subject: A stop job for unit sshd-keygen.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has finished. ░░ ░░ The job identifier is 558 and the job result is done. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target... ░░ Subject: A stop job for unit sshd-keygen.target has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sshd-keygen.target has begun execution. ░░ ░░ The job identifier is 558. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 554. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 556. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). 
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 557. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 558. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... ░░ Subject: A start job for unit sshd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has begun execution. ░░ ░░ The job identifier is 488. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[827]: Server listening on 0.0.0.0 port 22. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[827]: Server listening on :: port 22. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. ░░ Subject: A start job for unit sshd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd.service has finished successfully. ░░ ░░ The job identifier is 488. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Apply the settings specified in cloud-config. ░░ Subject: A start job for unit cloud-config.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-config.service has finished successfully. ░░ ░░ The job identifier is 245. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Execute cloud user/final scripts... ░░ Subject: A start job for unit cloud-final.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has begun execution. ░░ ░░ The job identifier is 244. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... ░░ Subject: A start job for unit systemd-user-sessions.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has begun execution. ░░ ░░ The job identifier is 201. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Permit User Sessions. ░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-user-sessions.service has finished successfully. ░░ ░░ The job identifier is 201. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. ░░ Subject: A start job for unit crond.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit crond.service has finished successfully. ░░ ░░ The job identifier is 238. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com crond[830]: (CRON) STARTUP (1.5.7) Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com crond[830]: (CRON) INFO (Syslog will be used instead of sendmail.) 
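[Editor's illustrative aside, not part of the captured journal] The sshd-keygen@ecdsa/ed25519/rsa units above are skipped rather than failed: their ConditionPathExists=! check is unmet because cloud-init.target is enabled and cloud-init already generated the host keys earlier in this boot. A hedged sketch for confirming a unit's condition outcome from a playbook, assuming systemd's `systemctl show` on the managed node:

- name: Inspect why sshd-keygen@rsa.service was skipped (illustrative)
  ansible.builtin.command: systemctl show -p ConditionResult,ConditionTimestamp sshd-keygen@rsa.service
  register: keygen_condition
  changed_when: false

- name: Show the condition result for the skipped unit
  ansible.builtin.debug:
    var: keygen_condition.stdout_lines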
Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com crond[830]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 81% if used.) Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com crond[830]: (CRON) INFO (running with inotify support) Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. ░░ Subject: A start job for unit getty@tty1.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty@tty1.service has finished successfully. ░░ ░░ The job identifier is 236. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Serial Getty on ttyS0. ░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit serial-getty@ttyS0.service has finished successfully. ░░ ░░ The job identifier is 231. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. ░░ Subject: A start job for unit getty.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit getty.target has finished successfully. ░░ ░░ The job identifier is 230. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. ░░ Subject: A start job for unit multi-user.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit multi-user.target has finished successfully. ░░ ░░ The job identifier is 116. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Record Runlevel Change in UTMP... ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution. ░░ ░░ The job identifier is 213. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Record Runlevel Change in UTMP. ░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully. ░░ ░░ The job identifier is 213. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[855]: Cloud-init v. 23.4-19.el9 running 'modules:final' at Tue, 19 Nov 2024 19:30:14 +0000. Up 23.72 seconds. 
Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[857]: ############################################################# Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[859]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[862]: 1024 SHA256:RHEoQZbLGCt1rx4TethceqBMoefbQwAsZa7p9NlmKXU root@ip-10-31-9-203.us-east-1.aws.redhat.com (DSA) Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[864]: 256 SHA256:mTTURh/Z9AvAm+eM9BUxnK2Yl9Cmkmf61MPZAxqd34Y root@ip-10-31-9-203.us-east-1.aws.redhat.com (ECDSA) Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[866]: 256 SHA256:z+LLZzO2Ae7TwGZ5rOzFXe4ASdTk1ozJo20Px7Irp2g root@ip-10-31-9-203.us-east-1.aws.redhat.com (ED25519) Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[868]: 3072 SHA256:n9aj+eDxXx8kydxigjnerKPbMi15BJOb9zq6dPy37RA root@ip-10-31-9-203.us-east-1.aws.redhat.com (RSA) Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[869]: -----END SSH HOST KEY FINGERPRINTS----- Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[870]: ############################################################# Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com cloud-init[855]: Cloud-init v. 23.4-19.el9 finished at Tue, 19 Nov 2024 19:30:14 +0000. Datasource DataSourceEc2Local. Up 23.86 seconds Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Execute cloud user/final scripts. ░░ Subject: A start job for unit cloud-final.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-final.service has finished successfully. ░░ ░░ The job identifier is 244. Nov 19 14:30:14 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. ░░ Subject: A start job for unit cloud-init.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init.target has finished successfully. ░░ ░░ The job identifier is 241. 
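[Editor's illustrative aside, not part of the captured journal] At this point cloud-init has run its 'init', 'modules:config', and 'modules:final' stages against DataSourceEc2Local, printed the host-key fingerprints, and systemd has reached cloud-init.target. A hedged sketch for asserting that completion from a playbook, assuming cloud-init is installed on the managed node:

- name: Confirm cloud-init finished all stages (illustrative)
  ansible.builtin.command: cloud-init status --long
  register: ci_status
  changed_when: false
  # Surface degraded/error states in the output rather than failing this
  # purely informational check.
  failed_when: false

- name: Show cloud-init status
  ansible.builtin.debug:
    var: ci_status.stdout_lines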
Nov 19 14:30:15 ip-10-31-9-203.us-east-1.aws.redhat.com restraintd[811]: Listening on http://localhost:8081 Nov 19 14:30:15 ip-10-31-9-203.us-east-1.aws.redhat.com kdumpctl[794]: kdump: Detected change(s) in the following file(s): /etc/fstab Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 0 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 0 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 48 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 48 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 49 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 49 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 50 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 50 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 51 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 51 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 52 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 52 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 53 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 53 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 54 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 54 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 55 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 55 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 56 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 56 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 57 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 57 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 58 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 58 affinity is now unmanaged Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 59 affinity: Input/output error Nov 19 14:30:16 ip-10-31-9-203.us-east-1.aws.redhat.com irqbalance[582]: IRQ 59 affinity is now unmanaged Nov 19 14:30:17 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Selected source 216.229.0.50 (2.centos.pool.ntp.org) Nov 19 14:30:17 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: System clock TAI offset set to 37 seconds Nov 19 14:30:19 ip-10-31-9-203.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute 
has been deprecated. Nov 19 14:30:19 ip-10-31-9-203.us-east-1.aws.redhat.com kdumpctl[794]: kdump: Rebuilding /boot/initramfs-5.14.0-522.el9.x86_64kdump.img Nov 19 14:30:20 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1427]: dracut-057-70.git20240819.el9 Nov 19 14:30:20 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics -o "plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/12106a9f-8b78-4fb4-964b-200fca3a6310 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-5.14.0-522.el9.x86_64kdump.img 5.14.0-522.el9.x86_64 Nov 19 14:30:21 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Selected source 71.185.197.34 (2.centos.pool.ntp.org) Nov 19 14:30:21 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! 
Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Module 'plymouth' will not be installed, because it's in the list to be omitted! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Nov 19 14:30:22 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Module 'resume' will not be installed, because it's in the list to be omitted! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! 
Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: memstrack is not available Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found! 
Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: memstrack is not available Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: systemd *** Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: systemd-initrd *** Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: nss-softokn *** Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: rngd *** Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: i18n *** Nov 19 14:30:23 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: drm *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: prefixdevname *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: kernel-modules *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: kernel-modules-extra *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: fstab-sys *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: rootfs-block *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: terminfo *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: udev-rules *** Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Skipping udev rule: 91-permissions.rules Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Skipping udev rule: 80-drivers-modprobe.rules Nov 19 14:30:24 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: 
dracut-systemd *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: usrmount *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: base *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: fs-lib *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: kdumpbase *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: microcode_ctl-fw_dir_override *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl module: mangling fw_dir Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: intel: caveats check for kernel version "5.14.0-522.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-2d-07" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-4e-03" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-4f-01" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-55-04" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-5e-03" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-8c-01" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... 
Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: shutdown *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including module: squash *** Nov 19 14:30:25 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Including modules done *** Nov 19 14:30:26 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Installing kernel module dependencies *** Nov 19 14:30:26 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Installing kernel module dependencies done *** Nov 19 14:30:26 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Resolving executable dependencies *** Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Resolving executable dependencies done *** Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Hardlinking files *** Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Mode: real Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Files: 433 Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Linked: 1 files Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Compared: 0 xattrs Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Compared: 7 files Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Saved: 56.15 KiB Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Duration: 0.006921 seconds Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Hardlinking files done *** Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Generating early-microcode cpio image *** Nov 19 14:30:27 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Constructing GenuineIntel.bin *** Nov 19 14:30:28 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Constructing GenuineIntel.bin *** Nov 19 14:30:28 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Store current command line parameters *** Nov 19 14:30:28 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: Stored kernel commandline: Nov 19 14:30:28 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: No dracut internal kernel commandline stored in the initramfs Nov 19 14:30:28 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Install squash loader *** Nov 19 14:30:28 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Stripping files *** Nov 19 14:30:29 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Stripping files done *** Nov 19 14:30:29 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Squashing the files inside the initramfs *** Nov 19 14:30:34 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Squashing the files inside the initramfs done *** Nov 19 14:30:34 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Creating image file '/boot/initramfs-5.14.0-522.el9.x86_64kdump.img' *** Nov 19 14:30:35 ip-10-31-9-203.us-east-1.aws.redhat.com dracut[1429]: *** Creating initramfs image file '/boot/initramfs-5.14.0-522.el9.x86_64kdump.img' done *** Nov 19 14:30:35 ip-10-31-9-203.us-east-1.aws.redhat.com kdumpctl[794]: kdump: kexec: loaded kdump kernel Nov 19 14:30:35 
ip-10-31-9-203.us-east-1.aws.redhat.com kdumpctl[794]: kdump: Starting kdump: [OK] Nov 19 14:30:35 ip-10-31-9-203.us-east-1.aws.redhat.com kdumpctl[794]: kdump: Notice: No vmcore creation test performed! Nov 19 14:30:35 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 250. Nov 19 14:30:35 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.194s (kernel) + 3.306s (initrd) + 40.309s (userspace) = 44.811s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. ░░ ░░ Kernel start-up required 1194918 microseconds. ░░ ░░ Initrd start-up required 3306979 microseconds. ░░ ░░ Userspace start-up required 40309401 microseconds. Nov 19 14:30:41 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Nov 19 14:31:23 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Selected source 69.89.207.99 (2.centos.pool.ntp.org) Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4070]: Accepted publickey for root from 10.30.33.134 port 59118 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 560. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 564. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-logind[588]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4070. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 564. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... 
░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 559. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Queued start job for default target Main User Target. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Created slice User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Started Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Reached target Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Reached target Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Starting D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Starting Create User's Volatile Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 8. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Listening on D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Reached target Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. 
░░ ░░ The job identifier is 11. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Finished Create User's Volatile Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Reached target Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Reached target Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[4074]: Startup finished in 73ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 73157 microseconds. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 559. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 627. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4070]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4083]: Received disconnect from 10.30.33.134 port 59118:11: disconnected by user Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4083]: Disconnected from user root 10.30.33.134 port 59118 Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4070]: pam_unix(sshd:session): session closed for user root Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-logind[588]: Session 1 logged out. Waiting for processes to exit. Nov 19 14:33:32 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-logind[588]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. 
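The dracut run recorded above assembles the kdump initramfs module by module and writes it to /boot/initramfs-5.14.0-522.el9.x86_64kdump.img, which kdumpctl then loads. A minimal sketch for spot-checking that image afterwards, assuming the dracut-supplied lsinitrd tool is present on the host:

    # List the contents of the kdump initramfs dracut just generated
    # (image path taken from the log entries above).
    lsinitrd /boot/initramfs-5.14.0-522.el9.x86_64kdump.img | head -n 40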
Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4112]: Accepted publickey for root from 10.31.11.86 port 46736 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4113]: Accepted publickey for root from 10.31.11.86 port 46744 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-logind[588]: New session 3 of user root. ░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4112. Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 696. Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-logind[588]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4113. Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 765. Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4112]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4113]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4119]: Received disconnect from 10.31.11.86 port 46744:11: disconnected by user Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4119]: Disconnected from user root 10.31.11.86 port 46744 Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com sshd[4113]: pam_unix(sshd:session): session closed for user root Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-logind[588]: Session 4 logged out. Waiting for processes to exit. Nov 19 14:33:34 ip-10-31-9-203.us-east-1.aws.redhat.com systemd-logind[588]: Removed session 4. ░░ Subject: Session 4 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 4 has been terminated. Nov 19 14:33:35 ip-10-31-9-203.us-east-1.aws.redhat.com chronyd[593]: Selected source 71.185.197.34 (2.centos.pool.ntp.org) Nov 19 14:34:17 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... 
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 835. Nov 19 14:34:17 ip-10-31-9-203.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 835. Nov 19 14:34:17 managed-node1 systemd-hostnamed[5445]: Hostname set to (static) Nov 19 14:34:17 managed-node1 NetworkManager[619]: [1732044857.9728] hostname: static hostname changed from "ip-10-31-9-203.us-east-1.aws.redhat.com" to "managed-node1" Nov 19 14:34:17 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 901. Nov 19 14:34:17 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 901. Nov 19 14:34:28 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Nov 19 14:34:48 managed-node1 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Nov 19 14:35:17 managed-node1 sshd[6091]: Accepted publickey for root from 10.31.41.102 port 60920 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Nov 19 14:35:17 managed-node1 systemd-logind[588]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 6091. Nov 19 14:35:17 managed-node1 systemd[1]: Started Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 967. 
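The entries above show systemd-hostnamed and NetworkManager picking up the static hostname change from ip-10-31-9-203.us-east-1.aws.redhat.com to managed-node1. The log does not record which tool triggered the change; a hedged sketch of one way to reproduce the same change by hand:

    # Set the static hostname; systemd-hostnamed applies it and
    # NetworkManager logs the rename, as seen in the journal above.
    hostnamectl set-hostname managed-node1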
Nov 19 14:35:17 managed-node1 sshd[6091]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Nov 19 14:35:19 managed-node1 python3.9[6219]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 19 14:35:19 managed-node1 python3.9[6352]: ansible-service_facts Invoked Nov 19 14:35:22 managed-node1 python3.9[6542]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:35:22 managed-node1 python3.9[6649]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 19 14:35:50 managed-node1 kernel: SELinux: Converting 370 SID table entries... Nov 19 14:35:50 managed-node1 kernel: SELinux: policy capability network_peer_controls=1 Nov 19 14:35:50 managed-node1 kernel: SELinux: policy capability open_perms=1 Nov 19 14:35:50 managed-node1 kernel: SELinux: policy capability extended_socket_class=1 Nov 19 14:35:50 managed-node1 kernel: SELinux: policy capability always_check_network=0 Nov 19 14:35:50 managed-node1 kernel: SELinux: policy capability cgroup_seclabel=1 Nov 19 14:35:50 managed-node1 kernel: SELinux: policy capability nnp_nosuid_transition=1 Nov 19 14:35:50 managed-node1 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Nov 19 14:35:51 managed-node1 dbus-broker-launch[578]: avc: op=load_policy lsm=selinux seqno=2 res=1 Nov 19 14:35:51 managed-node1 systemd[1]: Starting PCP Reboot Initialization Helper Service... ░░ Subject: A start job for unit pcp-reboot-init.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcp-reboot-init.service has begun execution. ░░ ░░ The job identifier is 1036. Nov 19 14:35:51 managed-node1 systemd[1]: Finished PCP Reboot Initialization Helper Service. ░░ Subject: A start job for unit pcp-reboot-init.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcp-reboot-init.service has finished successfully. ░░ ░░ The job identifier is 1036. Nov 19 14:35:51 managed-node1 systemd[1]: Reloading. Nov 19 14:35:51 managed-node1 systemd-rc-local-generator[7186]: /etc/rc.d/rc.local is not marked executable, skipping. Nov 19 14:35:52 managed-node1 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1039. Nov 19 14:35:52 managed-node1 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1039. 
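The ansible-ansible.legacy.dnf invocation above installs the pcp and pcp-zeroconf packages, after which the SELinux policy is reloaded and pmcd is started for the first time. A rough shell equivalent of that installation step, using the package names recorded in the log:

    # Install Performance Co-Pilot plus the zeroconf defaults, as the
    # recorded dnf task does (package names taken from the log).
    dnf install -y pcp pcp-zeroconf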
Nov 19 14:35:52 managed-node1 systemd[1]: Starting Performance Metrics Inference Engine... ░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1174. Nov 19 14:35:52 managed-node1 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1104. Nov 19 14:35:52 managed-node1 pmcd[7603]: Installing dm PMDA ... Nov 19 14:35:52 managed-node1 rc[7463]: /etc/pcp/pmlogger/rc: Warning: Performance Co-Pilot archive logger(s) not permanently enabled. Nov 19 14:35:52 managed-node1 rc[7460]: /etc/pcp/pmie/rc: Warning: Performance Co-Pilot Inference Engine (pmie) not permanently enabled. Nov 19 14:35:52 managed-node1 rc[7463]: To enable pmlogger, run the following as root: Nov 19 14:35:52 managed-node1 rc[7463]: # /usr/bin/systemctl enable pmlogger.service Nov 19 14:35:52 managed-node1 rc[7460]: To enable pmie, run the following as root: Nov 19 14:35:52 managed-node1 rc[7460]: # /usr/bin/systemctl enable pmie.service Nov 19 14:35:52 managed-node1 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. ░░ ░░ The job identifier is 1174. Nov 19 14:35:52 managed-node1 systemd[1]: Started Half-hourly check of PMIE instances. ░░ Subject: A start job for unit pmie_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.timer has finished successfully. ░░ ░░ The job identifier is 1242. Nov 19 14:35:52 managed-node1 systemd[1]: Started Daily processing of PMIE logs. ░░ Subject: A start job for unit pmie_daily.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_daily.timer has finished successfully. ░░ ░░ The job identifier is 1241. Nov 19 14:35:52 managed-node1 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1175. Nov 19 14:35:52 managed-node1 systemd[1]: Starting Check PMIE instances are running... ░░ Subject: A start job for unit pmie_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.service has begun execution. ░░ ░░ The job identifier is 1244. Nov 19 14:35:52 managed-node1 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 1175. Nov 19 14:35:52 managed-node1 systemd[1]: Started Half-hourly check of pmie farm instances. 
░░ Subject: A start job for unit pmie_farm_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.timer has finished successfully. ░░ ░░ The job identifier is 1240. Nov 19 14:35:52 managed-node1 systemd[1]: Started Check PMIE instances are running. ░░ Subject: A start job for unit pmie_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_check.service has finished successfully. ░░ ░░ The job identifier is 1244. Nov 19 14:35:52 managed-node1 systemd[1]: Starting Check and migrate non-primary pmie farm instances... ░░ Subject: A start job for unit pmie_farm_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.service has begun execution. ░░ ░░ The job identifier is 1309. Nov 19 14:35:52 managed-node1 systemd[1]: Started Check and migrate non-primary pmie farm instances. ░░ Subject: A start job for unit pmie_farm_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm_check.service has finished successfully. ░░ ░░ The job identifier is 1309. Nov 19 14:35:53 managed-node1 systemd[1]: pmie_farm_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm_check.service has successfully entered the 'dead' state. Nov 19 14:35:53 managed-node1 systemd[1]: pmie_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_check.service has successfully entered the 'dead' state. Nov 19 14:35:53 managed-node1 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1104. Nov 19 14:35:53 managed-node1 systemd[1]: Started Half-hourly check of pmlogger instances. ░░ Subject: A start job for unit pmlogger_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.timer has finished successfully. ░░ ░░ The job identifier is 1105. Nov 19 14:35:53 managed-node1 systemd[1]: Started Daily processing of archives. ░░ Subject: A start job for unit pmlogger_daily.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_daily.timer has finished successfully. ░░ ░░ The job identifier is 1172. Nov 19 14:35:53 managed-node1 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1170. Nov 19 14:35:53 managed-node1 systemd[1]: Reloading. Nov 19 14:35:53 managed-node1 systemd-rc-local-generator[8474]: /etc/rc.d/rc.local is not marked executable, skipping. Nov 19 14:35:54 managed-node1 systemd[1]: Started pmlogger farm service. 
░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1170. Nov 19 14:35:54 managed-node1 systemd[1]: Started Half-hourly check of pmlogger farm instances. ░░ Subject: A start job for unit pmlogger_farm_check.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.timer has finished successfully. ░░ ░░ The job identifier is 1171. Nov 19 14:35:54 managed-node1 systemd[1]: Starting Check pmlogger instances are running... ░░ Subject: A start job for unit pmlogger_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.service has begun execution. ░░ ░░ The job identifier is 1374. Nov 19 14:35:54 managed-node1 systemd[1]: Starting Check and migrate non-primary pmlogger farm instances... ░░ Subject: A start job for unit pmlogger_farm_check.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.service has begun execution. ░░ ░░ The job identifier is 1439. Nov 19 14:35:54 managed-node1 systemd[1]: Started Check pmlogger instances are running. ░░ Subject: A start job for unit pmlogger_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_check.service has finished successfully. ░░ ░░ The job identifier is 1374. Nov 19 14:35:54 managed-node1 systemd[1]: Started Check and migrate non-primary pmlogger farm instances. ░░ Subject: A start job for unit pmlogger_farm_check.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm_check.service has finished successfully. ░░ ░░ The job identifier is 1439. Nov 19 14:35:54 managed-node1 systemd[1]: pmlogger_farm_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm_check.service has successfully entered the 'dead' state. Nov 19 14:35:55 managed-node1 systemd[1]: pmlogger_check.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_check.service has successfully entered the 'dead' state. Nov 19 14:35:55 managed-node1 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r8206c097b3214912a2731c94ece9e917.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r8206c097b3214912a2731c94ece9e917.service has finished successfully. ░░ ░░ The job identifier is 1504. Nov 19 14:35:55 managed-node1 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1569. Nov 19 14:35:55 managed-node1 systemd[1]: Reloading. Nov 19 14:35:55 managed-node1 systemd-rc-local-generator[9490]: /etc/rc.d/rc.local is not marked executable, skipping. 
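A few entries earlier, /etc/pcp/pmlogger/rc and /etc/pcp/pmie/rc warn that pmlogger and pmie are running but not permanently enabled, and print the commands to fix that. Collected into one sketch, with the commands copied from those warnings:

    # Permanently enable the archive logger and inference engine so they
    # survive reboots, per the rc script warnings above.
    /usr/bin/systemctl enable pmlogger.service
    /usr/bin/systemctl enable pmie.service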
Nov 19 14:35:55 managed-node1 systemd[1]: Queuing reload/restart jobs for marked units… Nov 19 14:35:55 managed-node1 systemd[1]: Stopping pmie farm service... ░░ Subject: A stop job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1776. Nov 19 14:35:55 managed-node1 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1704. Nov 19 14:35:55 managed-node1 systemd[1]: pmie_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm.service has successfully entered the 'dead' state. Nov 19 14:35:55 managed-node1 systemd[1]: Stopped pmie farm service. ░░ Subject: A stop job for unit pmie_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has finished. ░░ ░░ The job identifier is 1776 and the job result is done. Nov 19 14:35:55 managed-node1 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Nov 19 14:35:55 managed-node1 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 1704 and the job result is done. Nov 19 14:35:55 managed-node1 systemd[1]: Stopping Performance Metrics Inference Engine... ░░ Subject: A stop job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1706. Nov 19 14:35:55 managed-node1 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1634. Nov 19 14:35:55 managed-node1 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Nov 19 14:35:55 managed-node1 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 1634 and the job result is done. Nov 19 14:35:55 managed-node1 systemd[1]: pmlogger.service: Consumed 1.386s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Nov 19 14:35:55 managed-node1 systemd[1]: pmie.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie.service has successfully entered the 'dead' state. Nov 19 14:35:55 managed-node1 systemd[1]: Stopped Performance Metrics Inference Engine. ░░ Subject: A stop job for unit pmie.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has finished. ░░ ░░ The job identifier is 1706 and the job result is done. Nov 19 14:35:55 managed-node1 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1777. Nov 19 14:35:56 managed-node1 pmcd[7608]: Terminated Nov 19 14:35:56 managed-node1 pmcd[7490]: _pmda_setup: Interrupted! Nov 19 14:35:56 managed-node1 pmcd[7490]: _pmda_setup_cleanup: reset .NeedInstall for dm PMDA Nov 19 14:35:56 managed-node1 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. Nov 19 14:35:56 managed-node1 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 1777 and the job result is done. Nov 19 14:35:56 managed-node1 systemd[1]: pmcd.service: Consumed 1.699s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service completed and consumed the indicated resources. Nov 19 14:35:56 managed-node1 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1777. Nov 19 14:35:56 managed-node1 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1777. Nov 19 14:35:56 managed-node1 systemd[1]: Starting Performance Metrics Inference Engine... ░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1706. Nov 19 14:35:56 managed-node1 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1634. Nov 19 14:35:56 managed-node1 pmcd[11813]: Installing dm PMDA ... Nov 19 14:35:57 managed-node1 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. 
░░ ░░ The job identifier is 1706. Nov 19 14:35:57 managed-node1 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1776. Nov 19 14:35:57 managed-node1 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 1776. Nov 19 14:35:58 managed-node1 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1634. Nov 19 14:35:58 managed-node1 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 1704. Nov 19 14:35:58 managed-node1 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 1704. Nov 19 14:35:59 managed-node1 python3.9[14909]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 19 14:36:00 managed-node1 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Nov 19 14:36:00 managed-node1 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1569. Nov 19 14:36:00 managed-node1 systemd[1]: man-db-cache-update.service: Consumed 3.115s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service completed and consumed the indicated resources. Nov 19 14:36:00 managed-node1 systemd[1]: run-r8206c097b3214912a2731c94ece9e917.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r8206c097b3214912a2731c94ece9e917.service has successfully entered the 'dead' state. Nov 19 14:36:02 managed-node1 python3.9[16103]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:02 managed-node1 python3.9[16211]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:02 managed-node1 python3.9[16392]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:03 managed-node1 python3.9[16515]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:03 managed-node1 python3.9[16600]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044963.074782-7286-154366644648703/.source dest=/etc/pcp/labels/ansible-managed mode=0644 follow=False _original_basename=pmcd.explicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:04 managed-node1 python3.9[16712]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:04 managed-node1 python3.9[16797]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044963.8367236-7308-7280430553074/.source dest=/etc/pcp/labels/optional/ansible-managed mode=0644 follow=False _original_basename=pmcd.implicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:04 managed-node1 python3.9[16904]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:04 managed-node1 kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log. 
Nov 19 14:36:04 managed-node1 kernel: device-mapper: uevent: version 1.0.3 Nov 19 14:36:04 managed-node1 kernel: device-mapper: ioctl: 4.48.0-ioctl (2023-03-01) initialised: dm-devel@redhat.com Nov 19 14:36:05 managed-node1 pmcd[17026]: Installing nfsclient PMDA ... Nov 19 14:36:05 managed-node1 python3.9[17003]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044964.4942524-7323-4059852536762/.source dest=/etc/sysconfig/pmcd mode=0644 follow=False _original_basename=pmcd.defaults.j2 checksum=7518789c091387cd9c322e1a8fa8aad21d4efbd3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:05 managed-node1 python3.9[17252]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Nov 19 14:36:05 managed-node1 useradd[17254]: new group: name=metrics, GID=992 Nov 19 14:36:05 managed-node1 useradd[17254]: new user: name=metrics, UID=992, GID=992, home=/home/metrics, shell=/bin/bash, from=/dev/pts/0 Nov 19 14:36:06 managed-node1 python3.9[17367]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:06 managed-node1 sasldblistusers2[17371]: SASL error opening password file. Have you performed the migration from db2 using cyrusbdb2current? 
Nov 19 14:36:06 managed-node1 sasldblistusers2[17371]: _sasldb_getkeyhandle has failed Nov 19 14:36:06 managed-node1 python3.9[17483]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:06 managed-node1 python3.9[17663]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044966.295485-7368-161048262961827/.source.conf dest=/etc/sasl2/pmcd.conf mode=0644 follow=False _original_basename=pmcd.sasl2.conf.j2 checksum=615d2de55ab86108da0c7e6b64988fecb4169771 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:07 managed-node1 python3.9[17770]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:36:07 managed-node1 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1843. Nov 19 14:36:08 managed-node1 pmcd[17027]: Terminated Nov 19 14:36:08 managed-node1 pmcd[11644]: _pmda_setup: Interrupted! Nov 19 14:36:08 managed-node1 pmcd[11644]: _pmda_setup_cleanup: reset .NeedInstall for nfsclient PMDA Nov 19 14:36:08 managed-node1 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. Nov 19 14:36:08 managed-node1 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 1843 and the job result is done. Nov 19 14:36:08 managed-node1 systemd[1]: pmcd.service: Consumed 3.222s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service completed and consumed the indicated resources. Nov 19 14:36:08 managed-node1 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 1843. Nov 19 14:36:08 managed-node1 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 1843. Nov 19 14:36:08 managed-node1 pmcd[18278]: Installing nfsclient PMDA ... 
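The shell task recorded at 14:36:06 seeds the pmcd SASL database with a metrics user. The sasldblistusers2 error that follows is consistent with /etc/pcp/passwd.db not existing yet on the first run, after which the script goes on to create it. A condensed sketch mirroring the recorded commands (the echoed value is a placeholder for the real password):

    # Create the "metrics" SASL user for pmcd if it is not already present,
    # then lock down the password database, as the recorded task does.
    if ! sasldblistusers2 -f /etc/pcp/passwd.db | grep -q '^metrics@'; then
        echo "metrics" | saslpasswd2 -a pmcd metrics   # password value is a stand-in
        chown root:pcp /etc/pcp/passwd.db
        chmod 640 /etc/pcp/passwd.db
    fi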
Nov 19 14:36:09 managed-node1 python3.9[18406]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:09 managed-node1 python3.9[18628]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:09 managed-node1 python3.9[18735]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:10 managed-node1 python3.9[18842]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:10 managed-node1 python3.9[19009]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:10 managed-node1 python3.9[19116]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:11 managed-node1 python3.9[19223]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:11 managed-node1 python3.9[19348]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:12 
managed-node1 python3.9[19517]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:12 managed-node1 python3.9[19604]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044971.8511987-7488-57446781458777/.source dest=/etc/pcp/pmieconf/network/tcplistenoverflows owner=root group=root mode=0644 _original_basename=tcplistenoverflows follow=False checksum=608d8a6ac6ee33bb86b77d28ba24fbcd378db43d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:12 managed-node1 python3.9[19711]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:13 managed-node1 python3.9[19803]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044972.467969-7488-122199016746313/.source dest=/etc/pcp/pmieconf/network/tcpqfulldocookies owner=root group=root mode=0644 _original_basename=tcpqfulldocookies follow=False checksum=3256a5c2e8d07a20d8e97a08c0ab163252b0beae backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:13 managed-node1 python3.9[19910]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:13 managed-node1 python3.9[19997]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044973.0866504-7488-110127995979811/.source dest=/etc/pcp/pmieconf/network/tcpqfulldrops owner=root group=root mode=0644 _original_basename=tcpqfulldrops follow=False checksum=37b2bd7f2430bd9678ab078c5e69a53bea556524 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:13 managed-node1 pmcd[20115]: Installing openmetrics PMDA ... Nov 19 14:36:14 managed-node1 python3.9[20119]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:14 managed-node1 pmcd[20147]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20147) Info: Note: running as user "pcp" Nov 19 14:36:14 managed-node1 pmcd[20147]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20147) Info: Initializing ... currently in notready state. Nov 19 14:36:14 managed-node1 pmcd[20147]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20147) Info: Config change detected, traversed 3 config entries in 0.0001s, rescanning ... 
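The pmdaopenmetrics startup above reports traversing three config entries, which the entries that follow list as the grafana, kepler and vllm sources. For context, a hypothetical sketch of how such a source is usually registered with the openmetrics PMDA, assuming the stock config.d location and an example endpoint URL:

    # Register an OpenMetrics endpoint named "grafana"; the PMDA rescans its
    # config.d directory and exposes the source as a metrics cluster.
    echo 'http://localhost:3000/metrics' \
        > /var/lib/pcp/pmdas/openmetrics/config.d/grafana.url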
Nov 19 14:36:14 managed-node1 pmcd[20147]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20147) Info: Found source grafana cluster 1 Nov 19 14:36:14 managed-node1 pmcd[20147]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20147) Info: Found source kepler cluster 2 Nov 19 14:36:14 managed-node1 pmcd[20147]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20147) Info: Found source vllm cluster 3 Nov 19 14:36:14 managed-node1 pmcd[20147]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20147) Info: Ready to process requests Nov 19 14:36:14 managed-node1 pmcd[20205]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20205) Info: Note: running as user "pcp" Nov 19 14:36:14 managed-node1 pmcd[20205]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20205) Info: Initializing ... currently in notready state. Nov 19 14:36:14 managed-node1 pmcd[20205]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20205) Info: Config change detected, traversed 3 config entries in 0.0001s, rescanning ... Nov 19 14:36:14 managed-node1 pmcd[20205]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20205) Info: Found source grafana cluster 1 Nov 19 14:36:14 managed-node1 pmcd[20205]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20205) Info: Found source kepler cluster 2 Nov 19 14:36:14 managed-node1 pmcd[20205]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20205) Info: Found source vllm cluster 3 Nov 19 14:36:14 managed-node1 pmcd[20205]: [Tue Nov 19 14:36:14] pmdaopenmetrics(20205) Info: Ready to process requests Nov 19 14:36:14 managed-node1 python3.9[20241]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044973.7052755-7488-86593144051501/.source dest=/etc/pcp/pmieconf/power/thermal_throttle owner=root group=root mode=0644 _original_basename=thermal_throttle follow=False checksum=1d53d6182709617c8f633339652d8d9e75f3b603 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:14 managed-node1 python3.9[20435]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:15 managed-node1 python3.9[20522]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044974.557507-7488-40092287619231/.source dest=/etc/pcp/pmieconf/zeroconf/all_threads owner=root group=root mode=0644 _original_basename=all_threads follow=False checksum=65169db16dcaa224c211373001adc3addf1031c4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:15 managed-node1 python3.9[20629]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:15 managed-node1 python3.9[20683]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:16 managed-node1 python3.9[20886]: 
ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:16 managed-node1 python3.9[20993]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:16 managed-node1 python3.9[21100]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:17 managed-node1 python3.9[21212]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:17 managed-node1 python3.9[21319]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:17 managed-node1 python3.9[21426]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:18 managed-node1 python3.9[21545]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None 
modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:18 managed-node1 python3.9[21652]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:36:18 managed-node1 systemd[1]: Stopping pmie farm service... ░░ Subject: A stop job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1978. Nov 19 14:36:18 managed-node1 systemd[1]: pmie_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie_farm.service has successfully entered the 'dead' state. Nov 19 14:36:18 managed-node1 systemd[1]: Stopped pmie farm service. ░░ Subject: A stop job for unit pmie_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie_farm.service has finished. ░░ ░░ The job identifier is 1978 and the job result is done. Nov 19 14:36:18 managed-node1 systemd[1]: Stopping Performance Metrics Inference Engine... ░░ Subject: A stop job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1908. Nov 19 14:36:19 managed-node1 systemd[1]: pmie.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmie.service has successfully entered the 'dead' state. Nov 19 14:36:19 managed-node1 systemd[1]: Stopped Performance Metrics Inference Engine. ░░ Subject: A stop job for unit pmie.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmie.service has finished. ░░ ░░ The job identifier is 1908 and the job result is done. Nov 19 14:36:19 managed-node1 systemd[1]: Starting Performance Metrics Inference Engine... ░░ Subject: A start job for unit pmie.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has begun execution. ░░ ░░ The job identifier is 1908. Nov 19 14:36:19 managed-node1 systemd[1]: Started Performance Metrics Inference Engine. ░░ Subject: A start job for unit pmie.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie.service has finished successfully. ░░ ░░ The job identifier is 1908. Nov 19 14:36:19 managed-node1 systemd[1]: Starting pmie farm service... ░░ Subject: A start job for unit pmie_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has begun execution. ░░ ░░ The job identifier is 1978. Nov 19 14:36:19 managed-node1 systemd[1]: Started pmie farm service. ░░ Subject: A start job for unit pmie_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmie_farm.service has finished successfully. ░░ ░░ The job identifier is 1978. 
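Once the rule files are in place, the ansible-file entries with state=link record each rule being symlinked from /etc/pcp/pmieconf into pmie's runtime configuration tree under /var/lib/pcp/config/pmieconf, and the ansible-ansible.legacy.systemd entry restarts pmie; the journal shows pmie_farm.service being stopped and started around that restart. A sketch of the two kinds of task involved, with paths and module arguments taken from the log and the task names assumed:

  - name: Link a pmie rule into the pmie configuration tree (sketch)
    ansible.builtin.file:
      src: /etc/pcp/pmieconf/network/tcplistenoverflows
      dest: /var/lib/pcp/config/pmieconf/network/tcplistenoverflows
      state: link
      force: true

  - name: Restart and enable the Performance Metrics Inference Engine (sketch)
    ansible.builtin.systemd:
      name: pmie
      state: restarted
      enabled: true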
Nov 19 14:36:19 managed-node1 python3.9[22283]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:20 managed-node1 python3.9[22390]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:20 managed-node1 python3.9[22477]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044979.9859774-7702-15953400376570/.source dest=/etc/sysconfig/pmlogger mode=0644 follow=False _original_basename=pmlogger.defaults.j2 checksum=67bc35973101c614e92b1990f8bebfffc39fe498 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:20 managed-node1 python3.9[22584]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:21 managed-node1 python3.9[22671]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732044980.6549911-7724-280791974101945/.source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=cb4ba174284a3ed6fb6ab3e0b10cd8354f1dfc4c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:21 managed-node1 python3.9[22778]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:36:21 managed-node1 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2049. Nov 19 14:36:21 managed-node1 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Nov 19 14:36:21 managed-node1 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 2049 and the job result is done. Nov 19 14:36:21 managed-node1 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1979. Nov 19 14:36:21 managed-node1 systemd[1]: pmlogger.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Nov 19 14:36:21 managed-node1 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 1979 and the job result is done. Nov 19 14:36:21 managed-node1 systemd[1]: pmlogger.service: Consumed 1.473s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Nov 19 14:36:21 managed-node1 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 1979. Nov 19 14:36:22 managed-node1 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 1979. Nov 19 14:36:22 managed-node1 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2049. Nov 19 14:36:22 managed-node1 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 2049. 
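The pmlogger steps follow the same shape: a lineinfile task pins PCP_ARCHIVE_DIR in /etc/pcp.conf, templated files are copied to /etc/sysconfig/pmlogger and /etc/sysconfig/pmlogger_timers, and pmlogger is restarted together with pmlogger_farm. The lineinfile invocation logged at 14:36:19 corresponds to a task like the following, with the module arguments taken verbatim from the log and only the task name assumed:

  - name: Ensure PCP_ARCHIVE_DIR points at the pmlogger archive directory (sketch)
    ansible.builtin.lineinfile:
      path: /etc/pcp.conf
      regexp: '^PCP_ARCHIVE_DIR='
      line: PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger
      state: present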
Nov 19 14:36:23 managed-node1 python3.9[23789]: ansible-ansible.legacy.command Invoked with _raw_params=pcp _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:24 managed-node1 python3.9[23987]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:24 managed-node1 python3.9[24097]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:24 managed-node1 python3.9[24205]: ansible-ansible.legacy.command Invoked with _raw_params=grep "^# Ansible managed" "/etc/sysconfig/pmlogger_timers" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:25 managed-node1 python3.9[24313]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail pmprobe -I pmcd.pmie.pmcd_host | grep '"primary"' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:25 managed-node1 python3.9[24423]: ansible-ansible.legacy.command Invoked with _raw_params=grep -e '--discard 137' /etc/sysconfig/pmlogger_timers _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:26 managed-node1 python3.9[24531]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:36:26 managed-node1 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2120. Nov 19 14:36:26 managed-node1 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Nov 19 14:36:26 managed-node1 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 2120 and the job result is done. Nov 19 14:36:26 managed-node1 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2050. Nov 19 14:36:26 managed-node1 systemd[1]: pmlogger.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Nov 19 14:36:26 managed-node1 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 2050 and the job result is done. Nov 19 14:36:26 managed-node1 systemd[1]: pmlogger.service: Consumed 1.195s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Nov 19 14:36:26 managed-node1 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2050. Nov 19 14:36:26 managed-node1 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 2050. Nov 19 14:36:26 managed-node1 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2120. Nov 19 14:36:26 managed-node1 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 2120. 
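The ansible-ansible.legacy.command entries logged between 14:36:23 and 14:36:25 are the test's verification steps: running pcp, checking with pmprobe that the primary pmlogger and pmie instances are registered with pmcd, and grepping the sysconfig files for the "# Ansible managed" header and the "--discard 137" timer option. The flattened pmprobe pipeline, reassembled as a shell task sketch with the script content as logged and the task name and changed_when added as assumptions:

  - name: Verify the primary pmlogger is registered with pmcd (sketch)
    ansible.builtin.shell: |
      set -euo pipefail
      pmprobe -I pmcd.pmlogger.pmcd_host | grep '"primary"'
    changed_when: false   # read-only check; assumed, not in the log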
Nov 19 14:36:27 managed-node1 python3.9[25173]: ansible-service_facts Invoked Nov 19 14:36:31 managed-node1 python3.9[25766]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 19 14:36:32 managed-node1 python3.9[25899]: ansible-service_facts Invoked Nov 19 14:36:34 managed-node1 python3.9[26090]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:36:35 managed-node1 python3.9[26197]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 19 14:36:36 managed-node1 python3.9[26305]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 19 14:36:37 managed-node1 python3.9[26413]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:38 managed-node1 python3.9[26521]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:38 managed-node1 python3.9[26628]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:39 managed-node1 python3.9[26735]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:39 managed-node1 python3.9[26789]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False 
_diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:39 managed-node1 python3.9[26896]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:39 managed-node1 python3.9[26950]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:40 managed-node1 python3.9[27057]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:40 managed-node1 python3.9[27111]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:41 managed-node1 python3.9[27218]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Nov 19 14:36:41 managed-node1 python3.9[27327]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! 
sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:36:41 managed-node1 python3.9[27439]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:42 managed-node1 python3.9[27493]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:42 managed-node1 python3.9[27600]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:36:43 managed-node1 python3.9[27709]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:43 managed-node1 python3.9[27816]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:44 managed-node1 python3.9[27923]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:44 managed-node1 python3.9[28030]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:44 managed-node1 python3.9[28137]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:45 managed-node1 
python3.9[28244]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:45 managed-node1 python3.9[28351]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:45 managed-node1 python3.9[28458]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:46 managed-node1 python3.9[28565]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:46 managed-node1 python3.9[28619]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:46 managed-node1 python3.9[28726]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:46 managed-node1 python3.9[28780]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:47 managed-node1 python3.9[28887]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:47 managed-node1 python3.9[28941]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None 
serole=None selevel=None setype=None attributes=None Nov 19 14:36:47 managed-node1 python3.9[29048]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:48 managed-node1 python3.9[29102]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:48 managed-node1 python3.9[29209]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:48 managed-node1 python3.9[29263]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:49 managed-node1 python3.9[29370]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:49 managed-node1 python3.9[29424]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:49 managed-node1 python3.9[29531]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:50 managed-node1 python3.9[29638]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:50 managed-node1 python3.9[29745]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies 
recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:50 managed-node1 python3.9[29852]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:51 managed-node1 python3.9[29959]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:51 managed-node1 python3.9[30066]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:51 managed-node1 python3.9[30173]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:52 managed-node1 python3.9[30280]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:36:53 managed-node1 python3.9[30389]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:53 managed-node1 python3.9[30496]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:53 managed-node1 python3.9[30550]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file 
path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:54 managed-node1 python3.9[30657]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:36:54 managed-node1 python3.9[30744]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1732045013.7796335-8977-35300418737906/.source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=df7bd3b5b6f1de3af164aab81441c7251a13a298 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:36:55 managed-node1 python3.9[30851]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:36:55 managed-node1 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2191. Nov 19 14:36:55 managed-node1 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Nov 19 14:36:55 managed-node1 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 2191 and the job result is done. Nov 19 14:36:55 managed-node1 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2121. Nov 19 14:36:55 managed-node1 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Nov 19 14:36:55 managed-node1 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 2121 and the job result is done. Nov 19 14:36:55 managed-node1 systemd[1]: pmlogger.service: Consumed 1.235s CPU time. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Nov 19 14:36:55 managed-node1 systemd[1]: Starting Performance Metrics Archive Logger... 
░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2121. Nov 19 14:36:55 managed-node1 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 2121. Nov 19 14:36:55 managed-node1 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2191. Nov 19 14:36:55 managed-node1 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 2191. Nov 19 14:36:56 managed-node1 python3.9[31872]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:36:57 managed-node1 systemd[1]: Stopping pmlogger farm service... ░░ Subject: A stop job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2262. Nov 19 14:36:57 managed-node1 systemd[1]: pmlogger_farm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger_farm.service has successfully entered the 'dead' state. Nov 19 14:36:57 managed-node1 systemd[1]: Stopped pmlogger farm service. ░░ Subject: A stop job for unit pmlogger_farm.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger_farm.service has finished. ░░ ░░ The job identifier is 2262 and the job result is done. Nov 19 14:36:57 managed-node1 systemd[1]: Stopping Performance Metrics Archive Logger... ░░ Subject: A stop job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2192. Nov 19 14:36:57 managed-node1 systemd[1]: pmlogger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service has successfully entered the 'dead' state. Nov 19 14:36:57 managed-node1 systemd[1]: Stopped Performance Metrics Archive Logger. ░░ Subject: A stop job for unit pmlogger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmlogger.service has finished. ░░ ░░ The job identifier is 2192 and the job result is done. Nov 19 14:36:57 managed-node1 systemd[1]: pmlogger.service: Consumed 1.197s CPU time. 
░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmlogger.service completed and consumed the indicated resources. Nov 19 14:36:57 managed-node1 systemd[1]: Starting Performance Metrics Archive Logger... ░░ Subject: A start job for unit pmlogger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has begun execution. ░░ ░░ The job identifier is 2192. Nov 19 14:36:57 managed-node1 systemd[1]: Started Performance Metrics Archive Logger. ░░ Subject: A start job for unit pmlogger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger.service has finished successfully. ░░ ░░ The job identifier is 2192. Nov 19 14:36:57 managed-node1 systemd[1]: Starting pmlogger farm service... ░░ Subject: A start job for unit pmlogger_farm.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has begun execution. ░░ ░░ The job identifier is 2262. Nov 19 14:36:57 managed-node1 systemd[1]: Started pmlogger farm service. ░░ Subject: A start job for unit pmlogger_farm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmlogger_farm.service has finished successfully. ░░ ░░ The job identifier is 2262. Nov 19 14:36:58 managed-node1 python3.9[32508]: ansible-service_facts Invoked Nov 19 14:37:01 managed-node1 python3.9[33079]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:37:01 managed-node1 python3.9[33187]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:37:02 managed-node1 python3.9[33295]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:37:02 managed-node1 python3.9[33403]: ansible-ansible.legacy.systemd Invoked with name=pmproxy state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Nov 19 14:37:04 managed-node1 python3.9[33546]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Nov 19 14:37:04 managed-node1 python3.9[33679]: ansible-service_facts Invoked Nov 19 14:37:07 managed-node1 python3.9[33870]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:37:07 managed-node1 python3.9[33977]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None 
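At 14:37:01 through 14:37:02 the test asserts the expected end state of the PCP services: pmcd, pmlogger and pmie started, pmproxy stopped. Expressed as one looped task this could look like the sketch below; the journal actually shows four separate systemd invocations, so the loop form is an assumption for brevity, with the service names and states taken from the log.

  - name: Check PCP service states after the run (sketch)
    ansible.builtin.systemd:
      name: "{{ item.name }}"
      state: "{{ item.state }}"
    loop:
      - { name: pmcd, state: started }
      - { name: pmlogger, state: started }
      - { name: pmie, state: started }
      - { name: pmproxy, state: stopped }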
Nov 19 14:37:09 managed-node1 python3.9[34085]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 19 14:37:10 managed-node1 python3.9[34193]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:10 managed-node1 python3.9[34301]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:11 managed-node1 python3.9[34408]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:11 managed-node1 python3.9[34515]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:12 managed-node1 python3.9[34569]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:12 managed-node1 python3.9[34676]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:12 managed-node1 python3.9[34730]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:12 managed-node1 python3.9[34837]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:13 
managed-node1 python3.9[34891]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:13 managed-node1 python3.9[34998]: ansible-user Invoked with name=pcptest system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Nov 19 14:37:13 managed-node1 useradd[35000]: new group: name=pcptest, GID=991 Nov 19 14:37:13 managed-node1 useradd[35000]: new user: name=pcptest, UID=991, GID=991, home=/home/pcptest, shell=/bin/bash, from=/dev/pts/0 Nov 19 14:37:14 managed-node1 python3.9[35113]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^pcptest@"; then echo "Creating new pcptest user in /etc/pcp/passwd.db" echo "t;dlen;dle" | saslpasswd2 -a pmcd "pcptest" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:14 managed-node1 python3.9[35229]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:14 managed-node1 python3.9[35283]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:15 managed-node1 python3.9[35390]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:16 managed-node1 python3.9[35499]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:16 managed-node1 python3.9[35606]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False 
follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:16 managed-node1 python3.9[35713]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:17 managed-node1 python3.9[35820]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:17 managed-node1 python3.9[35927]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:17 managed-node1 python3.9[36034]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:18 managed-node1 python3.9[36141]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:18 managed-node1 python3.9[36248]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:18 managed-node1 python3.9[36355]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:19 managed-node1 python3.9[36409]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:19 managed-node1 python3.9[36516]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:19 managed-node1 python3.9[36570]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:19 managed-node1 python3.9[36677]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:20 managed-node1 python3.9[36731]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:20 managed-node1 python3.9[36838]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:20 managed-node1 python3.9[36892]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:21 managed-node1 python3.9[36999]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:21 managed-node1 python3.9[37053]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:21 managed-node1 python3.9[37160]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:22 managed-node1 python3.9[37214]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 
dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:22 managed-node1 python3.9[37321]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:22 managed-node1 python3.9[37428]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:23 managed-node1 python3.9[37535]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:23 managed-node1 python3.9[37642]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:23 managed-node1 python3.9[37749]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:24 managed-node1 python3.9[37856]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None 
setype=None attributes=None Nov 19 14:37:24 managed-node1 python3.9[37963]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:25 managed-node1 python3.9[38070]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:25 managed-node1 python3.9[38179]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:25 managed-node1 python3.9[38286]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:26 managed-node1 python3.9[38340]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:26 managed-node1 python3.9[38447]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Nov 19 14:37:26 managed-node1 python3.9[38501]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Nov 19 14:37:27 managed-node1 python3.9[38608]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:28 managed-node1 python3.9[38717]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:37:28 managed-node1 python3.9[38824]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Nov 19 14:37:29 managed-node1 python3.9[38931]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False 
enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Nov 19 14:37:30 managed-node1 python3.9[39039]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Nov 19 14:37:31 managed-node1 python3.9[39148]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Nov 19 14:37:31 managed-node1 systemd[1]: Reloading. Nov 19 14:37:31 managed-node1 systemd-rc-local-generator[39166]: /etc/rc.d/rc.local is not marked executable, skipping. Nov 19 14:37:31 managed-node1 systemd[1]: Starting firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 2263. Nov 19 14:37:31 managed-node1 systemd[1]: Started firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 2263. Nov 19 14:37:31 managed-node1 kernel: Warning: Unmaintained driver is detected: ip_set Nov 19 14:37:32 managed-node1 python3.9[39332]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['44321/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Nov 19 14:37:33 managed-node1 python3.9[39440]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl restart pmcd && sleep 5 _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:33 managed-node1 systemd[1]: Stopping Performance Metrics Collector Daemon... ░░ Subject: A stop job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 2334. Nov 19 14:37:34 managed-node1 systemd[1]: pmcd.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service has successfully entered the 'dead' state. Nov 19 14:37:34 managed-node1 systemd[1]: Stopped Performance Metrics Collector Daemon. ░░ Subject: A stop job for unit pmcd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit pmcd.service has finished. ░░ ░░ The job identifier is 2334 and the job result is done. Nov 19 14:37:34 managed-node1 systemd[1]: pmcd.service: Consumed 4.988s CPU time. 
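For reference, the firewalld configuration and collector restart recorded in the surrounding entries (the fedora.linux_system_roles.firewall_lib call with port=['44321/tcp'], permanent=True, runtime=True, followed by the raw command "systemctl restart pmcd && sleep 5") amount to roughly this manual sequence; this is a sketch of the effect only, since the role drives firewalld through its firewall_lib module rather than firewall-cmd, and 44321/tcp is simply the port taken from that invocation:

    # open pmcd's port in both the permanent and the running firewalld configuration
    firewall-cmd --permanent --add-port=44321/tcp
    firewall-cmd --add-port=44321/tcp
    # restart the collector and give it a moment to settle, as the test itself does
    systemctl restart pmcd && sleep 5
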
░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit pmcd.service completed and consumed the indicated resources. Nov 19 14:37:34 managed-node1 systemd[1]: Starting Performance Metrics Collector Daemon... ░░ Subject: A start job for unit pmcd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has begun execution. ░░ ░░ The job identifier is 2334. Nov 19 14:37:34 managed-node1 systemd[1]: Started Performance Metrics Collector Daemon. ░░ Subject: A start job for unit pmcd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pmcd.service has finished successfully. ░░ ░░ The job identifier is 2334. Nov 19 14:37:39 managed-node1 python3.9[40058]: ansible-ansible.legacy.command Invoked with _raw_params=id "pcptest" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:40 managed-node1 python3.9[40166]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail sasldblistusers2 -f /etc/pcp/passwd.db | grep -wq "pcptest" _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:40 managed-node1 python3.9[40276]: ansible-ansible.legacy.command Invoked with _raw_params=pminfo -f -h "pcp://127.0.0.1?username=pcptest&password=t;dlen;dle" disk.dev.read _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:41 managed-node1 python3.9[40384]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-ports _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:41 managed-node1 python3.9[40492]: ansible-ansible.legacy.command Invoked with _raw_params=firewall-cmd --list-ports _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Nov 19 14:37:42 managed-node1 python3.9[40600]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None ################## List of SELinux AVCs - note list may be empty ################## total 40 dr-xr-xr-x. 18 root root 235 Nov 12 04:08 ../ drwxr-xr-x. 2 root root 60 Nov 19 14:29 tmpfiles.d/ drwxr-xr-x. 3 root root 60 Nov 19 14:29 log/ drwxr-xr-x. 2 root root 40 Nov 19 14:29 mount/ drwxr-xr-x. 4 root root 100 Nov 19 14:29 initramfs/ -r--r--r--. 1 root root 33 Nov 19 14:29 machine-id srw-rw-rw-. 1 root root 0 Nov 19 14:30 rpcbind.sock= prw-------. 1 root root 0 Nov 19 14:30 initctl| drwxr-xr-x. 5 root root 100 Nov 19 14:30 credentials/ drwx------. 2 root root 40 Nov 19 14:30 cryptsetup/ drwxr-xr-x. 
2 root root 40 Nov 19 14:30 setrans/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 faillock/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 console/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 sepermit/ drwxr-xr-x. 2 root root 40 Nov 19 14:30 motd.d/ drwx--x--x. 3 root root 60 Nov 19 14:30 sudo/ -rw-r--r--. 1 root root 0 Nov 19 14:30 motd drwxr-xr-x. 3 root root 60 Nov 19 14:30 tpm2-tss/ drwx------. 2 rpc rpc 60 Nov 19 14:30 rpcbind/ -rw-r--r--. 1 root root 4 Nov 19 14:30 auditd.pid drwxr-xr-x. 2 root root 60 Nov 19 14:30 dbus/ srw-rw-rw-. 1 root root 0 Nov 19 14:30 .heim_org.h5l.kcm-socket= drwxr-xr-x. 2 root root 60 Nov 19 14:30 irqbalance/ -rw-r--r--. 1 root root 4 Nov 19 14:30 dhclient.pid -rw-r--r--. 1 root root 615 Nov 19 14:30 dhclient.lease drwxr-xr-x. 2 root root 60 Nov 19 14:30 chrony-dhcp/ -rw-------. 1 root root 4 Nov 19 14:30 gssproxy.pid drwxr-x---. 2 chrony chrony 80 Nov 19 14:30 chrony/ srw-rw-rw-. 1 root root 0 Nov 19 14:30 gssproxy.sock= -rw-------. 1 root root 4 Nov 19 14:30 sm-notify.pid drwxr-xr-x. 3 root root 80 Nov 19 14:30 lock/ -rw-------. 1 root root 3 Nov 19 14:30 rsyslogd.pid -rw-r--r--. 1 root root 4 Nov 19 14:30 sshd.pid -rw-r--r--. 1 root root 4 Nov 19 14:30 crond.pid ----------. 1 root root 0 Nov 19 14:30 cron.reboot drwx------. 3 root root 340 Nov 19 14:30 cloud-init/ -rw-------. 1 root root 0 Nov 19 14:30 agetty.reload drwxr-xr-x. 2 root root 80 Nov 19 14:30 blkid/ drwxr-xr-x. 3 root root 60 Nov 19 14:33 user/ drwxr-xr-x. 6 root root 160 Nov 19 14:34 NetworkManager/ drwxr-xr-x. 22 root root 560 Nov 19 14:37 systemd/ drwxr-xr-x. 29 root root 920 Nov 19 14:37 ./ drwxr-x---. 2 root root 40 Nov 19 14:37 firewalld/ drwxr-xr-x. 7 root root 160 Nov 19 14:37 udev/ drwxrwxr-x. 2 pcp pcp 160 Nov 19 14:37 pcp/ -rw-rw-r--. 1 root utmp 1920 Nov 19 14:37 utmp total 12 -r--r--r--. 1 pcp pcp 5 Nov 19 14:36 pmie.pid -r--r--r--. 1 pcp pcp 5 Nov 19 14:36 pmlogger.pid srw-rw-rw-. 1 pcp pcp 0 Nov 19 14:36 pmlogger.32157.socket= lrwxrwxrwx. 1 pcp pcp 30 Nov 19 14:36 pmlogger.primary.socket -> /run/pcp/pmlogger.32157.socket= drwxr-xr-x. 29 root root 920 Nov 19 14:37 ../ srw-rw-rw-. 1 root root 0 Nov 19 14:37 pmcd.socket= -r--r--r--. 1 root root 5 Nov 19 14:37 pmcd.pid drwxrwxr-x. 2 pcp pcp 160 Nov 19 14:37 ./ TASK [Reraise error] *********************************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17 Tuesday 19 November 2024 14:37:42 -0500 (0:00:00.429) 0:00:39.086 ****** fatal: [managed-node1]: FAILED! 
=> { "changed": false } MSG: {'changed': False, 'stdout': '44321/tcp', 'stderr': '', 'rc': 0, 'cmd': ['firewall-cmd', '--list-ports'], 'start': '2024-11-19 14:37:41.878922', 'end': '2024-11-19 14:37:42.082574', 'delta': '0:00:00.203652', 'msg': '', 'invocation': {'module_args': {'_raw_params': 'firewall-cmd --list-ports', '_uses_shell': False, 'expand_argument_vars': True, 'stdin_add_newline': True, 'strip_empty_ends': True, 'argv': None, 'chdir': None, 'executable': None, 'creates': None, 'removes': None, 'stdin': None}}, 'stdout_lines': ['44321/tcp'], 'stderr_lines': [], '_ansible_no_log': False, 'failed': True, 'failed_when_result': True} TASK [Get final state of services] ********************************************* task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Tuesday 19 November 2024 14:37:42 -0500 (0:00:00.027) 0:00:39.113 ****** ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", 
"state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": 
"systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcp-reboot-init.service": { "name": "pcp-reboot-init.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", 
"status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "redis.service": { "name": "redis.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, 
"sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": 
"systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": 
"active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Restore state of services] *********************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Tuesday 19 November 2024 14:37:44 -0500 (0:00:01.628) 0:00:40.742 ****** ok: [managed-node1] => (item=pmcd) => { "ansible_loop_var": "item", "changed": false, "item": "pmcd", "name": "pmcd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2024-11-19 14:37:34 EST", "ActiveEnterTimestampMonotonic": "463474700", "ActiveExitTimestamp": "Tue 2024-11-19 14:37:33 EST", "ActiveExitTimestampMonotonic": "462410852", "ActiveState": "active", "After": "network-online.target avahi-daemon.service systemd-journald.socket sysinit.target system.slice basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:37:34 EST", "AssertTimestampMonotonic": "463177613", "Before": "multi-user.target pmie.service shutdown.target pmproxy.service pmlogger.service zabbix-agent.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "422902000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:37:34 EST", "ConditionTimestampMonotonic": "463177609", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlGroupId": "4969", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", 
"Documentation": "\"man:pmcd(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "39871", "ExecMainStartTimestamp": "Tue 2024-11-19 14:37:34 EST", "ExecMainStartTimestampMonotonic": "463474666", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:37:34 EST] ; stop_time=[n/a] ; pid=39797 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; flags= ; start_time=[Tue 2024-11-19 14:37:34 EST] ; stop_time=[n/a] ; pid=39797 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:37:34 EST", "InactiveEnterTimestampMonotonic": "463177095", "InactiveExitTimestamp": "Tue 2024-11-19 14:37:34 EST", "InactiveExitTimestampMonotonic": "463180458", "InvocationID": "ef6fe0b910dc48ed961a9f666acc639c", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "39871", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "39305216", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", 
"MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmcd.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:37:34 EST", "StateChangeTimestampMonotonic": "463474700", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "9", "TasksMax": "22329", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "pmlogger.service multi-user.target pmie.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node1] => (item=pmlogger) => { "ansible_loop_var": "item", "changed": false, "item": "pmlogger", "name": "pmlogger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2024-11-19 14:36:57 EST", "ActiveEnterTimestampMonotonic": "426740396", "ActiveExitTimestamp": "Tue 2024-11-19 14:36:57 EST", "ActiveExitTimestampMonotonic": "426020276", "ActiveState": "active", "After": "system.slice systemd-journald.socket pcp-reboot-init.service basic.target sysinit.target pmcd.service network-online.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:36:57 EST", "AssertTimestampMonotonic": "426092990", "Before": 
"pmlogger_daily.timer pmlogger_farm.service shutdown.target pmlogger_check.timer multi-user.target", "BindsTo": "pmlogger_farm.service pmlogger_check.timer pmlogger_daily.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "1161940000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:36:57 EST", "ConditionTimestampMonotonic": "426092987", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlGroupId": "4858", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "\"man:pmlogger(1)\"", "DynamicUser": "no", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "32157", "ExecMainStartTimestamp": "Tue 2024-11-19 14:36:57 EST", "ExecMainStartTimestampMonotonic": "426740358", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmlogger/rc ; argv[]=/etc/pcp/pmlogger/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "993", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", 
"IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:36:57 EST", "InactiveEnterTimestampMonotonic": "426092264", "InactiveExitTimestamp": "Tue 2024-11-19 14:36:57 EST", "InactiveExitTimestampMonotonic": "426100413", "InvocationID": "c0a14659ecdc4a2eb8b3aac4091aa759", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "32157", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "3088384", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmlogger.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", 
"StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:36:57 EST", "StateChangeTimestampMonotonic": "426740396", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22329", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node1] => (item=pmie) => { "ansible_loop_var": "item", "changed": false, "item": "pmie", "name": "pmie", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2024-11-19 14:36:19 EST", "ActiveEnterTimestampMonotonic": "388441198", "ActiveExitTimestamp": "Tue 2024-11-19 14:36:18 EST", "ActiveExitTimestampMonotonic": "387995561", "ActiveState": "active", "After": "sysinit.target system.slice pcp-reboot-init.service network-online.target systemd-journald.socket basic.target pmcd.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:36:19 EST", "AssertTimestampMonotonic": "388229892", "Before": "shutdown.target pmie_farm.service multi-user.target pmie_check.timer pmie_daily.timer", "BindsTo": "pmie_farm.service pmie_check.timer pmie_daily.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "286897000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:36:19 EST", "ConditionTimestampMonotonic": "388229888", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlGroupId": "4562", "ControlPID": "0", "CoredumpFilter": "0x33", 
"DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "\"man:pmie(1)\"", "DynamicUser": "no", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "22035", "ExecMainStartTimestamp": "Tue 2024-11-19 14:36:19 EST", "ExecMainStartTimestampMonotonic": "388441166", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/etc/pcp/pmie/rc ; argv[]=/etc/pcp/pmie/rc stop-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "993", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2024-11-19 14:36:19 EST", "InactiveEnterTimestampMonotonic": "388229415", "InactiveExitTimestamp": "Tue 2024-11-19 14:36:19 EST", "InactiveExitTimestampMonotonic": "388234371", "InvocationID": "b11284b554a74775953fe7f3cb77b101", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "22035", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", 
"MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "1671168", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/pcp/pmie.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:36:19 EST", "StateChangeTimestampMonotonic": "388441198", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22329", "TimeoutAbortUSec": "2min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "2min", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "993", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "User": "pcp", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } ok: [managed-node1] => (item=pmproxy) => { "ansible_loop_var": "item", "changed": false, "item": "pmproxy", "name": "pmproxy", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "avahi-daemon.service network-online.target systemd-journald.socket redis.service system.slice pcp-reboot-init.service sysinit.target basic.target pmcd.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", 
"Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "0", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Proxy for Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "\"man:pmproxy(1)\"", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/etc/pcp/pmproxy/rc ; argv[]=/etc/pcp/pmproxy/rc start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/etc/pcp/pmproxy/rc ; argv[]=/etc/pcp/pmproxy/rc start-systemd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pmproxy.service", "FreezerState": "running", "GID": "[not set]", "Group": "pcp", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pmproxy.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", 
"LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Markers": "needs-restart", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pmproxy.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:35:51 EST", "StateChangeTimestampMonotonic": "360884783", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22329", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "User": "pcp", "UtmpMode": 
"init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } skipping: [managed-node1] => (item=redis) => { "ansible_loop_var": "item", "changed": false, "false_condition": "initial_state.ansible_facts.services[item + '.service']['status'] != 'not-found'", "item": "redis", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=valkey) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "valkey", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=grafana-server) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "grafana-server", "skip_reason": "Conditional result was False" } TASK [Stop firewall] *********************************************************** task path: /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:29 Tuesday 19 November 2024 14:37:46 -0500 (0:00:01.866) 0:00:42.608 ****** changed: [managed-node1] => { "changed": true, "name": "firewalld", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Tue 2024-11-19 14:37:31 EST", "ActiveEnterTimestampMonotonic": "460833568", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "dbus.socket sysinit.target polkit.service basic.target dbus-broker.service system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Tue 2024-11-19 14:37:31 EST", "AssertTimestampMonotonic": "460601179", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "655907000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2024-11-19 14:37:31 EST", "ConditionTimestampMonotonic": "460601176", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service iptables.service ipset.service shutdown.target ebtables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4932", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", 
"DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "39181", "ExecMainStartTimestamp": "Tue 2024-11-19 14:37:31 EST", "ExecMainStartTimestampMonotonic": "460613336", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[Tue 2024-11-19 14:37:31 EST] ; stop_time=[n/a] ; pid=39181 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[Tue 2024-11-19 14:37:31 EST] ; stop_time=[n/a] ; pid=39181 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Tue 2024-11-19 14:37:31 EST", "InactiveExitTimestampMonotonic": "460613681", "InvocationID": "1083a05b58f847b2a6b1d8f3f7bbbdea", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13956", "LimitNPROCSoft": "13956", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13956", "LimitSIGPENDINGSoft": "13956", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "39181", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "33415168", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": 
"infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2024-11-19 14:37:31 EST", "StateChangeTimestampMonotonic": "460833568", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22329", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } PLAY RECAP ********************************************************************* managed-node1 : ok=66 changed=4 unreachable=0 failed=1 skipped=48 rescued=1 ignored=0 Tuesday 19 November 2024 14:37:46 -0500 (0:00:00.544) 0:00:43.152 ****** =============================================================================== Restart PMCD ------------------------------------------------------------ 6.42s /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:35 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts --- 3.56s /tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24 
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts --- 1.97s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86
Restore state of services ----------------------------------------------- 1.87s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9
Get initial state of services ------------------------------------------- 1.76s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3
Get final state of services --------------------------------------------- 1.63s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3
fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages --- 1.49s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:27
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist --- 1.38s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist --- 1.37s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4
fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages --- 1.27s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:33
fedora.linux_system_roles.firewall : Install firewalld ------------------ 1.27s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Gathering Facts --------------------------------------------------------- 1.03s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_auth.yml:9
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 0.98s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
fedora.linux_system_roles.firewall : Configure firewall ----------------- 0.76s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is running and enabled on boot --- 0.76s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:110
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any explicit metric labels are configured --- 0.64s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:39
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging retention period is set --- 0.63s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:19
Check firewall port status for pmcd; metrics_manage_firewall is true ---- 0.63s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/tests/metrics/check_firewall_selinux.yml:54
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector system accounts are configured --- 0.62s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:60
fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is configured --- 0.61s
/tmp/collections-jt1/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:12
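Note for readers reconstructing the test flow from this log: the "Get final state of services" and "Restore state of services" tasks timed above, together with the skip conditions printed for the redis, valkey, and grafana-server items, follow the common service_facts-plus-loop cleanup pattern. The sketch below is a minimal, illustrative reconstruction of that pattern only; the task layout and the initial_state/final_state variable names are assumptions and it is not the restore_services_state.yml source.

    - name: Get final state of services
      ansible.builtin.service_facts:
      register: final_state

    - name: Restore state of services
      ansible.builtin.service:
        name: "{{ item }}"
        # Return each unit to whatever state it had when the test started;
        # initial_state is assumed to hold an earlier service_facts result.
        state: "{{ 'started' if initial_state.ansible_facts.services[item + '.service']['state'] == 'running' else 'stopped' }}"
      when:
        - item + '.service' in final_state.ansible_facts.services
        - initial_state.ansible_facts.services[item + '.service']['status'] != 'not-found'
      loop:
        - pmcd
        - pmlogger
        - pmie
        - pmproxy
        - redis
        - valkey
        - grafana-server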