ansible-playbook [core 2.17.7]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.13/site-packages/ansible
ansible collection location = /tmp/collections-mM1
executable location = /usr/local/bin/ansible-playbook
python version = 3.13.0 (main, Oct 8 2024, 00:00:00) [GCC 14.2.1 20240912 (Red Hat 14.2.1-3)] (/usr/bin/python3.13)
jinja version = 3.1.4
libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml
PLAY [all] *********************************************************************
TASK [Include vault variables] *************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Tuesday 03 December 2024 18:25:51 -0500 (0:00:00.008) 0:00:00.008 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_test_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
},
"mysql_container_root_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
}
},
"ansible_included_var_files": [
"/tmp/podman-x1m/tests/vars/vault-variables.yml"
],
"changed": false
}
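[NOTE]: The "__ansible_vault" blobs above are encrypted values loaded from an Ansible Vault file via include_vars. A minimal sketch of such a task (the file location follows the layout shown in the output; everything else is assumed):
    - name: Include vault variables
      ansible.builtin.include_vars:
        file: vars/vault-variables.yml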
PLAY [Deploy the quadlet demo app] *********************************************
TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Tuesday 03 December 2024 18:25:51 -0500 (0:00:00.025) 0:00:00.034 ******
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node1]
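[NOTE]: The interpreter-discovery warning above can be avoided by pinning the interpreter explicitly in inventory; a sketch, assuming a YAML inventory file:
    all:
      hosts:
        managed-node1:
          ansible_python_interpreter: /usr/bin/python3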
TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38
Tuesday 03 December 2024 18:25:52 -0500 (0:00:01.289) 0:00:01.324 ******
skipping: [managed-node1] => {
"false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""
}
TASK [End test] ****************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45
Tuesday 03 December 2024 18:25:52 -0500 (0:00:00.023) 0:00:01.347 ******
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node1] => {
"skip_reason": "end_play conditional evaluated to False, continuing play"
}
MSG:
end_play
TASK [Generate certificates] ***************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51
Tuesday 03 December 2024 18:25:53 -0500 (0:00:00.011) 0:00:01.359 ******
included: fedora.linux_system_roles.certificate for managed-node1
TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Tuesday 03 December 2024 18:25:53 -0500 (0:00:00.042) 0:00:01.402 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Tuesday 03 December 2024 18:25:53 -0500 (0:00:00.029) 0:00:01.431 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Tuesday 03 December 2024 18:25:53 -0500 (0:00:00.033) 0:00:01.465 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Tuesday 03 December 2024 18:25:53 -0500 (0:00:00.497) 0:00:01.962 ******
ok: [managed-node1] => {
"ansible_facts": {
"__certificate_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19
Tuesday 03 December 2024 18:25:53 -0500 (0:00:00.035) 0:00:01.997 ******
skipping: [managed-node1] => (item=RedHat.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "RedHat.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=Fedora.yml) => {
"ansible_facts": {
"__certificate_certmonger_packages": [
"certmonger",
"python3-packaging"
]
},
"ansible_included_var_files": [
"/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/Fedora.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "Fedora.yml"
}
skipping: [managed-node1] => (item=Fedora_41.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "Fedora_41.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=Fedora_41.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "Fedora_41.yml",
"skip_reason": "Conditional result was False"
}
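[NOTE]: The skip/ok pattern above is the usual first-found vars-file loop: each candidate file is included only if it exists on the controller. A sketch reconstructed from the loop items and the "__vars_file is file" condition shown (the exact candidate list is assumed; the duplicate Fedora_41.yml items arise because the distribution's major version equals its full version):
    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"        # RedHat.yml
        - "{{ ansible_facts['distribution'] }}.yml"     # Fedora.yml
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"  # Fedora_41.yml
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"        # Fedora_41.yml
      when: __vars_file is file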
TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Tuesday 03 December 2024 18:25:53 -0500 (0:00:00.055) 0:00:02.052 ******
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
Tuesday 03 December 2024 18:25:55 -0500 (0:00:01.376) 0:00:03.429 ******
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35
Tuesday 03 December 2024 18:25:56 -0500 (0:00:01.357) 0:00:04.786 ******
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//pre-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61
Tuesday 03 December 2024 18:25:57 -0500 (0:00:00.566) 0:00:05.352 ******
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//post-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
Tuesday 03 December 2024 18:25:57 -0500 (0:00:00.499) 0:00:05.852 ******
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"enabled": true,
"name": "certmonger",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0",
"ActiveEnterTimestamp": "Tue 2024-12-03 18:21:24 EST",
"ActiveEnterTimestampMonotonic": "418937939",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "system.slice sysinit.target basic.target systemd-journald.socket syslog.target dbus.socket dbus-broker.service network.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Tue 2024-12-03 18:21:24 EST",
"AssertTimestampMonotonic": "418892694",
"Before": "multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedorahosted.certmonger",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "484011000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Tue 2024-12-03 18:21:24 EST",
"ConditionTimestampMonotonic": "418892690",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/certmonger.service",
"ControlGroupId": "5981",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "Certificate monitoring and PKI enrollment",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestamp": "Tue 2024-12-03 18:21:24 EST",
"ExecMainHandoffTimestampMonotonic": "418907833",
"ExecMainPID": "7894",
"ExecMainStartTimestamp": "Tue 2024-12-03 18:21:24 EST",
"ExecMainStartTimestampMonotonic": "418894199",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/certmonger.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "certmonger.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Tue 2024-12-03 18:21:24 EST",
"InactiveExitTimestampMonotonic": "418896337",
"InvocationID": "2570ef824628466591a64c7093b92e0d",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "7894",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3441385472",
"MemoryCurrent": "3047424",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "9351168",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "certmonger.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"PIDFile": "/run/certmonger.pid",
"PartOf": "dbus-broker.service",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Tue 2024-12-03 18:23:39 EST",
"StateChangeTimestampMonotonic": "554390179",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "1",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101
Tuesday 03 December 2024 18:25:58 -0500 (0:00:00.977) 0:00:06.829 ******
changed: [managed-node1] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
}
MSG:
Certificate requested (new).
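[NOTE]: The loop item above maps one-to-one onto a certificate_requests entry for the certificate role; a sketch of the invocation, assuming the role's documented interface:
    - name: Generate certificates
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.certificate
      vars:
        certificate_requests:
          - name: quadlet_demo
            dns:
              - localhost
            ca: self-sign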
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
Tuesday 03 December 2024 18:25:59 -0500 (0:00:01.021) 0:00:07.851 ******
ok: [managed-node1] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRS3M4a3JyUkxSN21QQXlhY1R3VjBSVEFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTW1GagpaakkwWVdVdFlqUTBZalEzWWprdE9HWXdNekkyT1dNdE5HWXdOVGMwTkRNd0hoY05NalF4TWpBek1qTXlOVFU1CldoY05NalV4TWpBek1qTXlNVEkwV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ2g4eG5LRXJBSlN4MDZsVE8wdzlKemxTWDRNeFM3c2ZIbgpscVZXMlZROG45a3RoT3hTbFhwazg5TnlWVXJ6RmtFUENFaVlKSDRKalhlSEE3dCtGZ0FNS3NseXJmN01SbEtTCkZ2aGtNRzBra2dNY3gvd1liNzdNWXpPTmhPZnRwa1RSTWo0NXRSK2FpOEQrOEpUSU0vOUs1UUN1MXIvUDlJTUkKelY4RW5BVm1ZMDZIS3VTU1JMb2x0dHhMcjBsUWh5d3hrajRmT1lRVFhmVXdJWkR4QXg1SEVoWVZaL1ZvQXUwQwpJVFZCS1QrLzdMS2pWQlMwT1N1VnhUdkVuTldwK2MzVkhTeTFyY2liZnB4MkZJdmlaaEtLUjVNcUFqUzl5Qk00ClR2eDVwZ1ZTL2xBM2lWTVBaWkt1R3lReVVrd3F6RmZkT1RneFl5dld4WllRVFpYMWpuY1pBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVCZTU5CjFtSTQ1YWpFS08xaHBmbHAyaEdsa1Q0d0h3WURWUjBqQkJnd0ZvQVVuQ1pxSk5XYVd3VHo3MXNmQUNPdmFEdTgKaXF3d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFKa0oyUEdzN3dFNU5WYk44UGRWYVdQbUVYdlhpV2k4NHErSAp2cGxGemNVV2k0RmpDOVkyZ3hDNnFDMXpEYmMxWnlTek9abk9GSm54allpYjhka2hUR2N6VndOMTZHU1RlNk9tCk1ZWUpkc0x2dTBXSndMb0tnVk5idk1mVlBzL1JlZmh3QzRQSE5Ydk5yUC9ocmlEV2NKbGNqbmhCanZZK3B3MXAKZVM2WjFtZFZuM0lkb1ZtNGNIbEZUSFd1dDRyOSswQzJkTkx6b1J3Q29aRWZBQWZtZzViWm1WOWNmd3ZxdmpqQwp3YW43b2wxNnYwamlEMTh0RHhFMWNPQkEwL3pUTTFUam5mM3dUY1lmVXQ4Mi9pT1ZSZkZiU25GaVZZcStyKzc1CjI0cTB4bVRKWlllNEptdlgzOTRaYVdDeElFdzNoNmZTWGMrc0pONW04VjFLaEM4Tjd5OD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"cert",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
ok: [managed-node1] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ2g4eG5LRXJBSlN4MDYKbFRPMHc5SnpsU1g0TXhTN3NmSG5scVZXMlZROG45a3RoT3hTbFhwazg5TnlWVXJ6RmtFUENFaVlKSDRKalhlSApBN3QrRmdBTUtzbHlyZjdNUmxLU0Z2aGtNRzBra2dNY3gvd1liNzdNWXpPTmhPZnRwa1RSTWo0NXRSK2FpOEQrCjhKVElNLzlLNVFDdTFyL1A5SU1JelY4RW5BVm1ZMDZIS3VTU1JMb2x0dHhMcjBsUWh5d3hrajRmT1lRVFhmVXcKSVpEeEF4NUhFaFlWWi9Wb0F1MENJVFZCS1QrLzdMS2pWQlMwT1N1VnhUdkVuTldwK2MzVkhTeTFyY2liZnB4MgpGSXZpWmhLS1I1TXFBalM5eUJNNFR2eDVwZ1ZTL2xBM2lWTVBaWkt1R3lReVVrd3F6RmZkT1RneFl5dld4WllRClRaWDFqbmNaQWdNQkFBRUNnZ0VBQW5lMXQwM0RiTHpUZ3UvcmpxLzQwMXN5TGRRNkZ4QmpkeUVsUDB2R25BN2UKZHdldERId1dhNWFJZ2dLT2hyZUZMR3dMK1I5OTQ4K0o3WjdQQmFFNFduY2JieThtQmFtZytwOWdKUWRVM21kZAplMGFKSkR2WVVkSndJbWVrUmd0NnhjbytIRkI5Y2w3UFJSaUMxOWhzK3lYcFN3QzBMRlhBTDdYbURQMVltcmVzCmwxSU9YSm9mVmZMOEJNTytlMjI2YmhiOEdqNC9Sa1lWS0ZET05mc09KeEkzQzRGNjBCZG1xTm41cE54eWh2MjUKV04zaVZJWXRaclZVdUUwQ3JldmRIVG9UVkkrc1BOVm9ld2tVR0J3ZXBWdnNuNDIrYThXRDA1SjRVZGF4cnE4VAp5Q3BUdmdVTXk1L3RoeDB5TE96ckRneDJlRGhqVHhaUkhxbFlwNkl6MVFLQmdRRGMybFVFVnZUZ2pKd1ZGV1UvCk1MSWRBdStjRGR6RFNJTmVJMDNxMXZvb1hJeWViSXR1d3Z0VkRpZC9RSHMzVGFvNDVWUittRTQrN3VrUHZ4dGsKZmRCdmlEbmhyMkY0UUJUdkQ5UHJCVU1ETnJFd3FQN0pCOEZ3SjRRVmdIYWhLQXRpUWZYelFjb2Z3dk84eUpvRwpLVlhsNDFwSm1wcXdjWTliZkpMUmxPWWxsUUtCZ1FDN3VRU0hyb2VsYzltUU9POWZqa3JQR0dWdzcreVJmTzlKCm9oVVVLRjJuaXhwTW1Sd3hlSnBHbVkwdlhsWDFSdy9vR3NrSmoyTFFrUTFwZmxmYUs3T3dSVWJLUUtRcmUxdFQKYmk4L2RpeDFWOWVYVEk2Y2IvMjVmeFM0UjJUTGZmY2xuUVJZNWpVT3pvTWZHNW1RVjF6eEx2Und3TklMb0p6MAp2cmpJM251aWRRS0JnQldkVUtEQ0dlazRHYloraDVBdHBlRHMvcUpaUVBmanl1ZVZzUFNBNGE2OWtBMlpOZlE3CkpQbm9Ecm1IaUFlYmxFTkpQb0c5U2pzaWlyQzkxV0o2WmNIa3BUV1BRb3pRU0x3MDRscW9BeXozVjFRWjFnVzYKdXBYTmFFSHFkckhEQjRvd1JtL3VTbWZkWHBsbFVKMjF3ZkJqMWR4S1EwVnRkOGhPWXRnY0tJSVpBb0dBTitYNQpGbG4vSkdKaDU1TThuRXludEJGRXE2T1Y0eUM3dEVaaVZuVktwZ2hDMVlwR0hla1dyMjByNEc0TDFKVExoTWJ0CkZENnhJdi8rYVdwb3lKdDR6MFZodGZvWHFLYjc0OVVNbHE0ZXdQWmVTVHhQdnYrT0l2MVhjeVEzUEcwTytoRk8KYVJacnVEUUFVWFFMZnpLMGgxRC9FdEFuKzdSZzZIUWVYQm43NEwwQ2dZRUF4KzFuckZrQU0xVTd5ZnlDVSt4dApFTEZtbDNGN2Y1NEp2TjM3Yzdxek5kWlRRNkRISlErOHVtaXZGQkRLaUJaZ2xqMEJrcGxMYUR3dkgwZ2NnbDYzCkRDdy9RQzB2VFB0K3VRZkhZZ2RGcTJ4dGlUdXk1M0VDV21kMUdqY2JzZkdnMUJwRVlYeENBdFBESEJyM3dVdUoKNk9QOGxGMXE5SDNFVk5wYTlvQ1BmaDQ9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K",
"encoding": "base64",
"item": [
"key",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/private/quadlet_demo.key"
}
ok: [managed-node1] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRS3M4a3JyUkxSN21QQXlhY1R3VjBSVEFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTW1GagpaakkwWVdVdFlqUTBZalEzWWprdE9HWXdNekkyT1dNdE5HWXdOVGMwTkRNd0hoY05NalF4TWpBek1qTXlOVFU1CldoY05NalV4TWpBek1qTXlNVEkwV2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ2g4eG5LRXJBSlN4MDZsVE8wdzlKemxTWDRNeFM3c2ZIbgpscVZXMlZROG45a3RoT3hTbFhwazg5TnlWVXJ6RmtFUENFaVlKSDRKalhlSEE3dCtGZ0FNS3NseXJmN01SbEtTCkZ2aGtNRzBra2dNY3gvd1liNzdNWXpPTmhPZnRwa1RSTWo0NXRSK2FpOEQrOEpUSU0vOUs1UUN1MXIvUDlJTUkKelY4RW5BVm1ZMDZIS3VTU1JMb2x0dHhMcjBsUWh5d3hrajRmT1lRVFhmVXdJWkR4QXg1SEVoWVZaL1ZvQXUwQwpJVFZCS1QrLzdMS2pWQlMwT1N1VnhUdkVuTldwK2MzVkhTeTFyY2liZnB4MkZJdmlaaEtLUjVNcUFqUzl5Qk00ClR2eDVwZ1ZTL2xBM2lWTVBaWkt1R3lReVVrd3F6RmZkT1RneFl5dld4WllRVFpYMWpuY1pBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVCZTU5CjFtSTQ1YWpFS08xaHBmbHAyaEdsa1Q0d0h3WURWUjBqQkJnd0ZvQVVuQ1pxSk5XYVd3VHo3MXNmQUNPdmFEdTgKaXF3d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFKa0oyUEdzN3dFNU5WYk44UGRWYVdQbUVYdlhpV2k4NHErSAp2cGxGemNVV2k0RmpDOVkyZ3hDNnFDMXpEYmMxWnlTek9abk9GSm54allpYjhka2hUR2N6VndOMTZHU1RlNk9tCk1ZWUpkc0x2dTBXSndMb0tnVk5idk1mVlBzL1JlZmh3QzRQSE5Ydk5yUC9ocmlEV2NKbGNqbmhCanZZK3B3MXAKZVM2WjFtZFZuM0lkb1ZtNGNIbEZUSFd1dDRyOSswQzJkTkx6b1J3Q29aRWZBQWZtZzViWm1WOWNmd3ZxdmpqQwp3YW43b2wxNnYwamlEMTh0RHhFMWNPQkEwL3pUTTFUam5mM3dUY1lmVXQ4Mi9pT1ZSZkZiU25GaVZZcStyKzc1CjI0cTB4bVRKWlllNEptdlgzOTRaYVdDeElFdzNoNmZTWGMrc0pONW04VjFLaEM4Tjd5OD0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"ca",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
TASK [fedora.linux_system_roles.certificate : Create return data] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160
Tuesday 03 December 2024 18:26:01 -0500 (0:00:01.541) 0:00:09.393 ******
ok: [managed-node1] => {
"ansible_facts": {
"certificate_test_certs": {
"quadlet_demo": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQKs8krrRLR7mPAyacTwV0RTANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMmFj\nZjI0YWUtYjQ0YjQ3YjktOGYwMzI2OWMtNGYwNTc0NDMwHhcNMjQxMjAzMjMyNTU5\nWhcNMjUxMjAzMjMyMTI0WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCh8xnKErAJSx06lTO0w9JzlSX4MxS7sfHn\nlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeHA7t+FgAMKslyrf7MRlKS\nFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+8JTIM/9K5QCu1r/P9IMI\nzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUwIZDxAx5HEhYVZ/VoAu0C\nITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2FIviZhKKR5MqAjS9yBM4\nTvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQTZX1jncZAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUBe59\n1mI45ajEKO1hpflp2hGlkT4wHwYDVR0jBBgwFoAUnCZqJNWaWwTz71sfACOvaDu8\niqwwDQYJKoZIhvcNAQELBQADggEBAJkJ2PGs7wE5NVbN8PdVaWPmEXvXiWi84q+H\nvplFzcUWi4FjC9Y2gxC6qC1zDbc1ZySzOZnOFJnxjYib8dkhTGczVwN16GSTe6Om\nMYYJdsLvu0WJwLoKgVNbvMfVPs/RefhwC4PHNXvNrP/hriDWcJlcjnhBjvY+pw1p\neS6Z1mdVn3IdoVm4cHlFTHWut4r9+0C2dNLzoRwCoZEfAAfmg5bZmV9cfwvqvjjC\nwan7ol16v0jiD18tDxE1cOBA0/zTM1Tjnf3wTcYfUt82/iOVRfFbSnFiVYq+r+75\n24q0xmTJZYe4JmvX394ZaWCxIEw3h6fSXc+sJN5m8V1KhC8N7y8=\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQKs8krrRLR7mPAyacTwV0RTANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMmFj\nZjI0YWUtYjQ0YjQ3YjktOGYwMzI2OWMtNGYwNTc0NDMwHhcNMjQxMjAzMjMyNTU5\nWhcNMjUxMjAzMjMyMTI0WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCh8xnKErAJSx06lTO0w9JzlSX4MxS7sfHn\nlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeHA7t+FgAMKslyrf7MRlKS\nFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+8JTIM/9K5QCu1r/P9IMI\nzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUwIZDxAx5HEhYVZ/VoAu0C\nITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2FIviZhKKR5MqAjS9yBM4\nTvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQTZX1jncZAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUBe59\n1mI45ajEKO1hpflp2hGlkT4wHwYDVR0jBBgwFoAUnCZqJNWaWwTz71sfACOvaDu8\niqwwDQYJKoZIhvcNAQELBQADggEBAJkJ2PGs7wE5NVbN8PdVaWPmEXvXiWi84q+H\nvplFzcUWi4FjC9Y2gxC6qC1zDbc1ZySzOZnOFJnxjYib8dkhTGczVwN16GSTe6Om\nMYYJdsLvu0WJwLoKgVNbvMfVPs/RefhwC4PHNXvNrP/hriDWcJlcjnhBjvY+pw1p\neS6Z1mdVn3IdoVm4cHlFTHWut4r9+0C2dNLzoRwCoZEfAAfmg5bZmV9cfwvqvjjC\nwan7ol16v0jiD18tDxE1cOBA0/zTM1Tjnf3wTcYfUt82/iOVRfFbSnFiVYq+r+75\n24q0xmTJZYe4JmvX394ZaWCxIEw3h6fSXc+sJN5m8V1KhC8N7y8=\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCh8xnKErAJSx06\nlTO0w9JzlSX4MxS7sfHnlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeH\nA7t+FgAMKslyrf7MRlKSFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+\n8JTIM/9K5QCu1r/P9IMIzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUw\nIZDxAx5HEhYVZ/VoAu0CITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2\nFIviZhKKR5MqAjS9yBM4Tvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQ\nTZX1jncZAgMBAAECggEAAne1t03DbLzTgu/rjq/401syLdQ6FxBjdyElP0vGnA7e\ndwetDHwWa5aIggKOhreFLGwL+R9948+J7Z7PBaE4Wncbby8mBamg+p9gJQdU3mdd\ne0aJJDvYUdJwImekRgt6xco+HFB9cl7PRRiC19hs+yXpSwC0LFXAL7XmDP1Ymres\nl1IOXJofVfL8BMO+e226bhb8Gj4/RkYVKFDONfsOJxI3C4F60BdmqNn5pNxyhv25\nWN3iVIYtZrVUuE0CrevdHToTVI+sPNVoewkUGBwepVvsn42+a8WD05J4Udaxrq8T\nyCpTvgUMy5/thx0yLOzrDgx2eDhjTxZRHqlYp6Iz1QKBgQDc2lUEVvTgjJwVFWU/\nMLIdAu+cDdzDSINeI03q1vooXIyebItuwvtVDid/QHs3Tao45VR+mE4+7ukPvxtk\nfdBviDnhr2F4QBTvD9PrBUMDNrEwqP7JB8FwJ4QVgHahKAtiQfXzQcofwvO8yJoG\nKVXl41pJmpqwcY9bfJLRlOYllQKBgQC7uQSHroelc9mQOO9fjkrPGGVw7+yRfO9J\nohUUKF2nixpMmRwxeJpGmY0vXlX1Rw/oGskJj2LQkQ1pflfaK7OwRUbKQKQre1tT\nbi8/dix1V9eXTI6cb/25fxS4R2TLffclnQRY5jUOzoMfG5mQV1zxLvRwwNILoJz0\nvrjI3nuidQKBgBWdUKDCGek4GbZ+h5AtpeDs/qJZQPfjyueVsPSA4a69kA2ZNfQ7\nJPnoDrmHiAeblENJPoG9SjsiirC91WJ6ZcHkpTWPQozQSLw04lqoAyz3V1QZ1gW6\nupXNaEHqdrHDB4owRm/uSmfdXpllUJ21wfBj1dxKQ0Vtd8hOYtgcKIIZAoGAN+X5\nFln/JGJh55M8nEyntBFEq6OV4yC7tEZiVnVKpghC1YpGHekWr20r4G4L1JTLhMbt\nFD6xIv/+aWpoyJt4z0VhtfoXqKb749UMlq4ewPZeSTxPvv+OIv1XcyQ3PG0O+hFO\naRZruDQAUXQLfzK0h1D/EtAn+7Rg6HQeXBn74L0CgYEAx+1nrFkAM1U7yfyCU+xt\nELFml3F7f54JvN37c7qzNdZTQ6DHJQ+8umivFBDKiBZglj0BkplLaDwvH0gcgl63\nDCw/QC0vTPt+uQfHYgdFq2xtiTuy53ECWmd1GjcbsfGg1BpEYXxCAtPDHBr3wUuJ\n6OP8lF1q9H3EVNpa9oCPfh4=\n-----END PRIVATE KEY-----\n"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176
Tuesday 03 December 2024 18:26:01 -0500 (0:00:00.063) 0:00:09.456 ******
ok: [managed-node1] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQKs8krrRLR7mPAyacTwV0RTANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMmFj\nZjI0YWUtYjQ0YjQ3YjktOGYwMzI2OWMtNGYwNTc0NDMwHhcNMjQxMjAzMjMyNTU5\nWhcNMjUxMjAzMjMyMTI0WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCh8xnKErAJSx06lTO0w9JzlSX4MxS7sfHn\nlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeHA7t+FgAMKslyrf7MRlKS\nFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+8JTIM/9K5QCu1r/P9IMI\nzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUwIZDxAx5HEhYVZ/VoAu0C\nITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2FIviZhKKR5MqAjS9yBM4\nTvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQTZX1jncZAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUBe59\n1mI45ajEKO1hpflp2hGlkT4wHwYDVR0jBBgwFoAUnCZqJNWaWwTz71sfACOvaDu8\niqwwDQYJKoZIhvcNAQELBQADggEBAJkJ2PGs7wE5NVbN8PdVaWPmEXvXiWi84q+H\nvplFzcUWi4FjC9Y2gxC6qC1zDbc1ZySzOZnOFJnxjYib8dkhTGczVwN16GSTe6Om\nMYYJdsLvu0WJwLoKgVNbvMfVPs/RefhwC4PHNXvNrP/hriDWcJlcjnhBjvY+pw1p\neS6Z1mdVn3IdoVm4cHlFTHWut4r9+0C2dNLzoRwCoZEfAAfmg5bZmV9cfwvqvjjC\nwan7ol16v0jiD18tDxE1cOBA0/zTM1Tjnf3wTcYfUt82/iOVRfFbSnFiVYq+r+75\n24q0xmTJZYe4JmvX394ZaWCxIEw3h6fSXc+sJN5m8V1KhC8N7y8=\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCh8xnKErAJSx06\nlTO0w9JzlSX4MxS7sfHnlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeH\nA7t+FgAMKslyrf7MRlKSFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+\n8JTIM/9K5QCu1r/P9IMIzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUw\nIZDxAx5HEhYVZ/VoAu0CITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2\nFIviZhKKR5MqAjS9yBM4Tvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQ\nTZX1jncZAgMBAAECggEAAne1t03DbLzTgu/rjq/401syLdQ6FxBjdyElP0vGnA7e\ndwetDHwWa5aIggKOhreFLGwL+R9948+J7Z7PBaE4Wncbby8mBamg+p9gJQdU3mdd\ne0aJJDvYUdJwImekRgt6xco+HFB9cl7PRRiC19hs+yXpSwC0LFXAL7XmDP1Ymres\nl1IOXJofVfL8BMO+e226bhb8Gj4/RkYVKFDONfsOJxI3C4F60BdmqNn5pNxyhv25\nWN3iVIYtZrVUuE0CrevdHToTVI+sPNVoewkUGBwepVvsn42+a8WD05J4Udaxrq8T\nyCpTvgUMy5/thx0yLOzrDgx2eDhjTxZRHqlYp6Iz1QKBgQDc2lUEVvTgjJwVFWU/\nMLIdAu+cDdzDSINeI03q1vooXIyebItuwvtVDid/QHs3Tao45VR+mE4+7ukPvxtk\nfdBviDnhr2F4QBTvD9PrBUMDNrEwqP7JB8FwJ4QVgHahKAtiQfXzQcofwvO8yJoG\nKVXl41pJmpqwcY9bfJLRlOYllQKBgQC7uQSHroelc9mQOO9fjkrPGGVw7+yRfO9J\nohUUKF2nixpMmRwxeJpGmY0vXlX1Rw/oGskJj2LQkQ1pflfaK7OwRUbKQKQre1tT\nbi8/dix1V9eXTI6cb/25fxS4R2TLffclnQRY5jUOzoMfG5mQV1zxLvRwwNILoJz0\nvrjI3nuidQKBgBWdUKDCGek4GbZ+h5AtpeDs/qJZQPfjyueVsPSA4a69kA2ZNfQ7\nJPnoDrmHiAeblENJPoG9SjsiirC91WJ6ZcHkpTWPQozQSLw04lqoAyz3V1QZ1gW6\nupXNaEHqdrHDB4owRm/uSmfdXpllUJ21wfBj1dxKQ0Vtd8hOYtgcKIIZAoGAN+X5\nFln/JGJh55M8nEyntBFEq6OV4yC7tEZiVnVKpghC1YpGHekWr20r4G4L1JTLhMbt\nFD6xIv/+aWpoyJt4z0VhtfoXqKb749UMlq4ewPZeSTxPvv+OIv1XcyQ3PG0O+hFO\naRZruDQAUXQLfzK0h1D/EtAn+7Rg6HQeXBn74L0CgYEAx+1nrFkAM1U7yfyCU+xt\nELFml3F7f54JvN37c7qzNdZTQ6DHJQ+8umivFBDKiBZglj0BkplLaDwvH0gcgl63\nDCw/QC0vTPt+uQfHYgdFq2xtiTuy53ECWmd1GjcbsfGg1BpEYXxCAtPDHBr3wUuJ\n6OP8lF1q9H3EVNpa9oCPfh4=\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQKs8krrRLR7mPAyacTwV0RTANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMmFj\nZjI0YWUtYjQ0YjQ3YjktOGYwMzI2OWMtNGYwNTc0NDMwHhcNMjQxMjAzMjMyNTU5\nWhcNMjUxMjAzMjMyMTI0WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCh8xnKErAJSx06lTO0w9JzlSX4MxS7sfHn\nlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeHA7t+FgAMKslyrf7MRlKS\nFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+8JTIM/9K5QCu1r/P9IMI\nzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUwIZDxAx5HEhYVZ/VoAu0C\nITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2FIviZhKKR5MqAjS9yBM4\nTvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQTZX1jncZAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUBe59\n1mI45ajEKO1hpflp2hGlkT4wHwYDVR0jBBgwFoAUnCZqJNWaWwTz71sfACOvaDu8\niqwwDQYJKoZIhvcNAQELBQADggEBAJkJ2PGs7wE5NVbN8PdVaWPmEXvXiWi84q+H\nvplFzcUWi4FjC9Y2gxC6qC1zDbc1ZySzOZnOFJnxjYib8dkhTGczVwN16GSTe6Om\nMYYJdsLvu0WJwLoKgVNbvMfVPs/RefhwC4PHNXvNrP/hriDWcJlcjnhBjvY+pw1p\neS6Z1mdVn3IdoVm4cHlFTHWut4r9+0C2dNLzoRwCoZEfAAfmg5bZmV9cfwvqvjjC\nwan7ol16v0jiD18tDxE1cOBA0/zTM1Tjnf3wTcYfUt82/iOVRfFbSnFiVYq+r+75\n24q0xmTJZYe4JmvX394ZaWCxIEw3h6fSXc+sJN5m8V1KhC8N7y8=\n-----END CERTIFICATE-----\n'}) => {
"ansible_loop_var": "item",
"changed": false,
"cmd": [
"getcert",
"stop-tracking",
"-f",
"/etc/pki/tls/certs/quadlet_demo.crt"
],
"delta": "0:00:00.029409",
"end": "2024-12-03 18:26:01.715472",
"item": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQKs8krrRLR7mPAyacTwV0RTANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMmFj\nZjI0YWUtYjQ0YjQ3YjktOGYwMzI2OWMtNGYwNTc0NDMwHhcNMjQxMjAzMjMyNTU5\nWhcNMjUxMjAzMjMyMTI0WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCh8xnKErAJSx06lTO0w9JzlSX4MxS7sfHn\nlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeHA7t+FgAMKslyrf7MRlKS\nFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+8JTIM/9K5QCu1r/P9IMI\nzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUwIZDxAx5HEhYVZ/VoAu0C\nITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2FIviZhKKR5MqAjS9yBM4\nTvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQTZX1jncZAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUBe59\n1mI45ajEKO1hpflp2hGlkT4wHwYDVR0jBBgwFoAUnCZqJNWaWwTz71sfACOvaDu8\niqwwDQYJKoZIhvcNAQELBQADggEBAJkJ2PGs7wE5NVbN8PdVaWPmEXvXiWi84q+H\nvplFzcUWi4FjC9Y2gxC6qC1zDbc1ZySzOZnOFJnxjYib8dkhTGczVwN16GSTe6Om\nMYYJdsLvu0WJwLoKgVNbvMfVPs/RefhwC4PHNXvNrP/hriDWcJlcjnhBjvY+pw1p\neS6Z1mdVn3IdoVm4cHlFTHWut4r9+0C2dNLzoRwCoZEfAAfmg5bZmV9cfwvqvjjC\nwan7ol16v0jiD18tDxE1cOBA0/zTM1Tjnf3wTcYfUt82/iOVRfFbSnFiVYq+r+75\n24q0xmTJZYe4JmvX394ZaWCxIEw3h6fSXc+sJN5m8V1KhC8N7y8=\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQKs8krrRLR7mPAyacTwV0RTANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMmFj\nZjI0YWUtYjQ0YjQ3YjktOGYwMzI2OWMtNGYwNTc0NDMwHhcNMjQxMjAzMjMyNTU5\nWhcNMjUxMjAzMjMyMTI0WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCh8xnKErAJSx06lTO0w9JzlSX4MxS7sfHn\nlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeHA7t+FgAMKslyrf7MRlKS\nFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+8JTIM/9K5QCu1r/P9IMI\nzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUwIZDxAx5HEhYVZ/VoAu0C\nITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2FIviZhKKR5MqAjS9yBM4\nTvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQTZX1jncZAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUBe59\n1mI45ajEKO1hpflp2hGlkT4wHwYDVR0jBBgwFoAUnCZqJNWaWwTz71sfACOvaDu8\niqwwDQYJKoZIhvcNAQELBQADggEBAJkJ2PGs7wE5NVbN8PdVaWPmEXvXiWi84q+H\nvplFzcUWi4FjC9Y2gxC6qC1zDbc1ZySzOZnOFJnxjYib8dkhTGczVwN16GSTe6Om\nMYYJdsLvu0WJwLoKgVNbvMfVPs/RefhwC4PHNXvNrP/hriDWcJlcjnhBjvY+pw1p\neS6Z1mdVn3IdoVm4cHlFTHWut4r9+0C2dNLzoRwCoZEfAAfmg5bZmV9cfwvqvjjC\nwan7ol16v0jiD18tDxE1cOBA0/zTM1Tjnf3wTcYfUt82/iOVRfFbSnFiVYq+r+75\n24q0xmTJZYe4JmvX394ZaWCxIEw3h6fSXc+sJN5m8V1KhC8N7y8=\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCh8xnKErAJSx06\nlTO0w9JzlSX4MxS7sfHnlqVW2VQ8n9kthOxSlXpk89NyVUrzFkEPCEiYJH4JjXeH\nA7t+FgAMKslyrf7MRlKSFvhkMG0kkgMcx/wYb77MYzONhOftpkTRMj45tR+ai8D+\n8JTIM/9K5QCu1r/P9IMIzV8EnAVmY06HKuSSRLolttxLr0lQhywxkj4fOYQTXfUw\nIZDxAx5HEhYVZ/VoAu0CITVBKT+/7LKjVBS0OSuVxTvEnNWp+c3VHSy1rcibfpx2\nFIviZhKKR5MqAjS9yBM4Tvx5pgVS/lA3iVMPZZKuGyQyUkwqzFfdOTgxYyvWxZYQ\nTZX1jncZAgMBAAECggEAAne1t03DbLzTgu/rjq/401syLdQ6FxBjdyElP0vGnA7e\ndwetDHwWa5aIggKOhreFLGwL+R9948+J7Z7PBaE4Wncbby8mBamg+p9gJQdU3mdd\ne0aJJDvYUdJwImekRgt6xco+HFB9cl7PRRiC19hs+yXpSwC0LFXAL7XmDP1Ymres\nl1IOXJofVfL8BMO+e226bhb8Gj4/RkYVKFDONfsOJxI3C4F60BdmqNn5pNxyhv25\nWN3iVIYtZrVUuE0CrevdHToTVI+sPNVoewkUGBwepVvsn42+a8WD05J4Udaxrq8T\nyCpTvgUMy5/thx0yLOzrDgx2eDhjTxZRHqlYp6Iz1QKBgQDc2lUEVvTgjJwVFWU/\nMLIdAu+cDdzDSINeI03q1vooXIyebItuwvtVDid/QHs3Tao45VR+mE4+7ukPvxtk\nfdBviDnhr2F4QBTvD9PrBUMDNrEwqP7JB8FwJ4QVgHahKAtiQfXzQcofwvO8yJoG\nKVXl41pJmpqwcY9bfJLRlOYllQKBgQC7uQSHroelc9mQOO9fjkrPGGVw7+yRfO9J\nohUUKF2nixpMmRwxeJpGmY0vXlX1Rw/oGskJj2LQkQ1pflfaK7OwRUbKQKQre1tT\nbi8/dix1V9eXTI6cb/25fxS4R2TLffclnQRY5jUOzoMfG5mQV1zxLvRwwNILoJz0\nvrjI3nuidQKBgBWdUKDCGek4GbZ+h5AtpeDs/qJZQPfjyueVsPSA4a69kA2ZNfQ7\nJPnoDrmHiAeblENJPoG9SjsiirC91WJ6ZcHkpTWPQozQSLw04lqoAyz3V1QZ1gW6\nupXNaEHqdrHDB4owRm/uSmfdXpllUJ21wfBj1dxKQ0Vtd8hOYtgcKIIZAoGAN+X5\nFln/JGJh55M8nEyntBFEq6OV4yC7tEZiVnVKpghC1YpGHekWr20r4G4L1JTLhMbt\nFD6xIv/+aWpoyJt4z0VhtfoXqKb749UMlq4ewPZeSTxPvv+OIv1XcyQ3PG0O+hFO\naRZruDQAUXQLfzK0h1D/EtAn+7Rg6HQeXBn74L0CgYEAx+1nrFkAM1U7yfyCU+xt\nELFml3F7f54JvN37c7qzNdZTQ6DHJQ+8umivFBDKiBZglj0BkplLaDwvH0gcgl63\nDCw/QC0vTPt+uQfHYgdFq2xtiTuy53ECWmd1GjcbsfGg1BpEYXxCAtPDHBr3wUuJ\n6OP8lF1q9H3EVNpa9oCPfh4=\n-----END PRIVATE KEY-----\n"
},
"rc": 0,
"start": "2024-12-03 18:26:01.686063"
}
STDOUT:
Request "20241203232559" removed.
TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
Tuesday 03 December 2024 18:26:01 -0500 (0:00:00.676) 0:00:10.133 ******
changed: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
changed: [managed-node1] => (item=/etc/pki/tls/private/quadlet_demo.key) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/private/quadlet_demo.key",
"path": "/etc/pki/tls/private/quadlet_demo.key",
"state": "absent"
}
ok: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": false,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
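[NOTE]: The three loop results above remove the cert, key, and ca paths in turn; because ca and cert point at the same file for a self-signed request, the third pass reports "ok" (already absent). A sketch using the paths from the output:
    - name: Remove files
      ansible.builtin.file:
        path: "{{ item }}"
        state: absent
      loop:
        - /etc/pki/tls/certs/quadlet_demo.crt
        - /etc/pki/tls/private/quadlet_demo.key
        - /etc/pki/tls/certs/quadlet_demo.crt   # ca == cert here, hence the final "ok"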
TASK [Run the role] ************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62
Tuesday 03 December 2024 18:26:03 -0500 (0:00:01.288) 0:00:11.421 ******
included: fedora.linux_system_roles.podman for managed-node1
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Tuesday 03 December 2024 18:26:03 -0500 (0:00:00.069) 0:00:11.491 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Tuesday 03 December 2024 18:26:03 -0500 (0:00:00.027) 0:00:11.518 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Tuesday 03 December 2024 18:26:03 -0500 (0:00:00.035) 0:00:11.554 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Tuesday 03 December 2024 18:26:03 -0500 (0:00:00.435) 0:00:11.989 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Tuesday 03 December 2024 18:26:03 -0500 (0:00:00.023) 0:00:12.013 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Tuesday 03 December 2024 18:26:04 -0500 (0:00:00.420) 0:00:12.434 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Tuesday 03 December 2024 18:26:04 -0500 (0:00:00.026) 0:00:12.460 ******
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
ok: [managed-node1] => (item=Fedora.yml) => {
"ansible_facts": {
"__podman_packages": [
"iptables-nft",
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "Fedora.yml"
}
skipping: [managed-node1] => (item=Fedora_41.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "Fedora_41.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=Fedora_41.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "Fedora_41.yml",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Tuesday 03 December 2024 18:26:04 -0500 (0:00:00.042) 0:00:12.503 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Tuesday 03 December 2024 18:26:05 -0500 (0:00:01.252) 0:00:13.756 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Tuesday 03 December 2024 18:26:05 -0500 (0:00:00.047) 0:00:13.803 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages))",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Tuesday 03 December 2024 18:26:05 -0500 (0:00:00.054) 0:00:13.858 ******
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Tuesday 03 December 2024 18:26:05 -0500 (0:00:00.048) 0:00:13.907 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Tuesday 03 December 2024 18:26:05 -0500 (0:00:00.043) 0:00:13.951 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Tuesday 03 December 2024 18:26:05 -0500 (0:00:00.043) 0:00:13.995 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.033870",
"end": "2024-12-03 18:26:06.050932",
"rc": 0,
"start": "2024-12-03 18:26:06.017062"
}
STDOUT:
podman version 5.3.1
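[NOTE]: The "Get podman version"/"Set podman version" pair captures the CLI version into a fact for the version gates that follow; a sketch that parses the stdout shown above (the register name is assumed):
    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_output
      changed_when: false
    - name: Set podman version
      ansible.builtin.set_fact:
        podman_version: "{{ __podman_version_output.stdout.split()[-1] }}"  # "podman version 5.3.1" -> "5.3.1"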
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.488) 0:00:14.483 ******
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "5.3.1"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.038) 0:00:14.521 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.039) 0:00:14.561 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.196) 0:00:14.757 ******
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.101) 0:00:14.858 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.049) 0:00:14.907 ******
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.055) 0:00:14.963 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:26:06 -0500 (0:00:00.084) 0:00:15.047 ******
ok: [managed-node1] => {
"ansible_facts": {
"getent_passwd": {
"root": [
"x",
"0",
"0",
"Super User",
"/root",
"/bin/bash"
]
}
},
"changed": false
}
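[NOTE]: The getent_passwd fact above is the output shape of the ansible.builtin.getent module for the passwd database; a sketch, with the user variable assumed:
    - name: Get user information
      ansible.builtin.getent:
        database: passwd
        key: "{{ __podman_user }}"   # "root" in this run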
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:26:07 -0500 (0:00:00.604) 0:00:15.652 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:26:07 -0500 (0:00:00.057) 0:00:15.710 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:26:07 -0500 (0:00:00.055) 0:00:15.766 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:26:07 -0500 (0:00:00.455) 0:00:16.221 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:26:07 -0500 (0:00:00.042) 0:00:16.263 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:26:07 -0500 (0:00:00.052) 0:00:16.316 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.041) 0:00:16.358 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.036) 0:00:16.395 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.036) 0:00:16.432 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.043) 0:00:16.475 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.042) 0:00:16.517 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
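Every task in this branch is skipped because __podman_user is root: root-run containers need no subordinate ID ranges, so the condition __podman_user not in ["root", "0"] short-circuits the whole chain. For a rootless user the first check would invoke getsubids, roughly as follows (a sketch; the registered variable name is an assumption):

    - name: Check with getsubids for user subuids
      ansible.builtin.command: getsubids {{ __podman_user | quote }}
      register: __podman_subuid_info
      changed_when: false
      when: __podman_user not in ["root", "0"]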
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.054) 0:00:16.572 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.101) 0:00:16.673 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.101) 0:00:16.774 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.051) 0:00:16.826 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.064) 0:00:16.891 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.143) 0:00:17.035 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.055) 0:00:17.090 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.068) 0:00:17.158 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Tuesday 03 December 2024 18:26:08 -0500 (0:00:00.144) 0:00:17.303 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.054) 0:00:17.357 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.052) 0:00:17.410 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.108) 0:00:17.518 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.036) 0:00:17.555 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.042) 0:00:17.597 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.041) 0:00:17.638 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
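All four configuration handlers (containers.conf.d, registries.conf.d, storage.conf, policy.json) are skipped because the corresponding role variables are empty in this test. Supplying any of them activates the matching handler; a hypothetical play that drops a containers.conf.d snippet through the paths computed above might look like:

    - hosts: all
      vars:
        podman_containers_conf:
          containers:
            log_driver: journald
      roles:
        - fedora.linux_system_roles.podman

With podman_containers_conf non-empty, the "Ensure containers.d exists" and "Update container config file" tasks would run instead of skipping, writing /etc/containers/containers.conf.d/50-systemroles.conf.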
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.053) 0:00:17.692 ******
included: fedora.linux_system_roles.firewall for managed-node1
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.136) 0:00:17.829 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.074) 0:00:17.903 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Tuesday 03 December 2024 18:26:09 -0500 (0:00:00.059) 0:00:17.962 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Tuesday 03 December 2024 18:26:10 -0500 (0:00:00.463) 0:00:18.426 ******
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Tuesday 03 December 2024 18:26:10 -0500 (0:00:00.044) 0:00:18.470 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Tuesday 03 December 2024 18:26:10 -0500 (0:00:00.478) 0:00:18.948 ******
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_transactional": false
},
"changed": false
}
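The two flags just set, __firewall_is_ostree and __firewall_is_transactional, gate how the role installs packages and whether a reboot must follow. Both derive from simple stat probes; a sketch of the pattern, assuming the conventional marker path (the exact paths are not shown in this log):

    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted  # marker present only on ostree-based systems
      register: __firewall_ostree_booted

    - name: Set flag to indicate system is ostree
      ansible.builtin.set_fact:
        __firewall_is_ostree: "{{ __firewall_ostree_booted.stat.exists }}"

On this host both probes returned exists: false, so packages install through the normal package manager and no reboot handling is needed.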
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Tuesday 03 December 2024 18:26:10 -0500 (0:00:00.060) 0:00:19.009 ******
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Tuesday 03 December 2024 18:26:12 -0500 (0:00:01.401) 0:00:20.410 ******
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Tuesday 03 December 2024 18:26:12 -0500 (0:00:00.035) 0:00:20.446 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Tuesday 03 December 2024 18:26:12 -0500 (0:00:00.032) 0:00:20.479 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Tuesday 03 December 2024 18:26:12 -0500 (0:00:00.032) 0:00:20.511 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Tuesday 03 December 2024 18:26:12 -0500 (0:00:00.030) 0:00:20.542 ******
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
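Conflicting-service handling is opt-in: firewall_disable_conflicting_services defaults to false, so nftables, iptables, and ufw are left untouched. When enabled, the loop would stop and disable each one, roughly as below (a sketch, assuming ansible.builtin.service and the loop items shown above):

    - name: Attempt to stop and disable conflicting services
      ansible.builtin.service:
        name: "{{ item }}"
        state: stopped
        enabled: false
      loop: [nftables, iptables, ufw]
      when: firewall_disable_conflicting_services | bool
      failed_when: false  # a service that is not installed should not fail the play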
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Tuesday 03 December 2024 18:26:12 -0500 (0:00:00.040) 0:00:20.582 ******
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target sysinit.target dbus.socket polkit.service dbus-broker.service system.slice",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "network-pre.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ipset.service iptables.service ebtables.service shutdown.target ip6tables.service",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3461406720",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket sysinit.target system.slice",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
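Unmasking is a no-op here (changed: false); note that UnitFileState is still "disabled" and ActiveState "inactive" in the dump above. A minimal sketch of the unmask step, assuming ansible.builtin.systemd:

    - name: Unmask firewalld service
      ansible.builtin.systemd:
        name: firewalld
        masked: false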
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Tuesday 03 December 2024 18:26:12 -0500 (0:00:00.640) 0:00:21.223 ******
changed: [managed-node1] => {
"changed": true,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "dbus.socket system.slice basic.target polkit.service sysinit.target dbus-broker.service",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "network-pre.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ip6tables.service shutdown.target ipset.service iptables.service ebtables.service",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3468148736",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket sysinit.target system.slice",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
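This is the first real change in the firewall role: the unit goes from disabled/inactive to enabled and started (the status dump above appears to be a snapshot taken before the change is applied, which is why it still shows ActiveState "inactive" and UnitFileState "disabled"). A sketch of the task, assuming ansible.builtin.systemd:

    - name: Enable and start firewalld service
      ansible.builtin.systemd:
        name: firewalld
        enabled: true
        state: started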
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Tuesday 03 December 2024 18:26:14 -0500 (0:00:01.380) 0:00:22.604 ******
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/bin/python3",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Tuesday 03 December 2024 18:26:14 -0500 (0:00:00.047) 0:00:22.651 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Tuesday 03 December 2024 18:26:14 -0500 (0:00:00.036) 0:00:22.687 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Tuesday 03 December 2024 18:26:14 -0500 (0:00:00.039) 0:00:22.727 ******
changed: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
changed: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
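Both 8000/tcp and 9000/tcp are opened by the firewall role; the loop items come straight from the podman role's firewall specification. Reproducing this outside the test would be a matter of passing the same list, e.g. in a hypothetical standalone play:

    - hosts: all
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
          - port: 9000/tcp
            state: enabled
      roles:
        - fedora.linux_system_roles.firewall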
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Tuesday 03 December 2024 18:26:15 -0500 (0:00:01.434) 0:00:24.161 ******
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Tuesday 03 December 2024 18:26:15 -0500 (0:00:00.098) 0:00:24.260 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.123) 0:00:24.384 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.060) 0:00:24.444 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.061) 0:00:24.506 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.082) 0:00:24.588 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.078) 0:00:24.667 ******
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.094) 0:00:24.762 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.054) 0:00:24.816 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.064) 0:00:24.881 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.049) 0:00:24.931 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.049) 0:00:24.981 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.284) 0:00:25.265 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Tuesday 03 December 2024 18:26:16 -0500 (0:00:00.060) 0:00:25.326 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Tuesday 03 December 2024 18:26:17 -0500 (0:00:00.060) 0:00:25.386 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:17 -0500 (0:00:00.064) 0:00:25.451 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:17 -0500 (0:00:00.044) 0:00:25.495 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:17 -0500 (0:00:00.038) 0:00:25.534 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
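All three linger tasks are skipped because __podman_rootless is false: lingering only matters for rootless users, whose systemd user instance (and therefore their quadlet units) must outlive their login sessions. For a rootless user the enable step would amount to something like the following (a sketch; the creates: guard path is an assumption):

    - name: Enable linger if needed
      ansible.builtin.command: loginctl enable-linger {{ __podman_user | quote }}
      args:
        creates: /var/lib/systemd/linger/{{ __podman_user }}
      when: __podman_rootless | bool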
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Tuesday 03 December 2024 18:26:17 -0500 (0:00:00.034) 0:00:25.568 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Tuesday 03 December 2024 18:26:17 -0500 (0:00:00.037) 0:00:25.606 ******
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations
(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.883) 0:00:26.489 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.058) 0:00:26.548 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.074) 0:00:26.623 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.091) 0:00:26.714 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.086) 0:00:26.801 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.050) 0:00:26.851 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.039) 0:00:26.891 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Tuesday 03 December 2024 18:26:18 -0500 (0:00:00.040) 0:00:26.931 ******
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.630) 0:00:27.562 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.079) 0:00:27.641 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.077) 0:00:27.719 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.114) 0:00:27.833 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.074) 0:00:27.908 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.053) 0:00:27.961 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.093) 0:00:28.054 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Tuesday 03 December 2024 18:26:19 -0500 (0:00:00.060) 0:00:28.115 ******
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
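The three handle_secret.yml passes above each create one podman secret on the host (changed: true), with all names and values censored by no_log. The inputs follow the shape of the role's podman_secrets variable; a clearly made-up example of that shape (names and variables here are placeholders, not the censored values):

    vars:
      podman_secrets:
        - name: example-secret             # placeholder; real names are hidden by no_log
          data: "{{ example_vault_var }}"  # placeholder vaulted variable
          state: present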
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Tuesday 03 December 2024 18:26:20 -0500 (0:00:00.679) 0:00:28.795 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Tuesday 03 December 2024 18:26:20 -0500 (0:00:00.077) 0:00:28.872 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:26:20 -0500 (0:00:00.315) 0:00:29.188 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
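With the \n escapes in __podman_quadlet_str expanded, the unit the role is about to install as quadlet-demo.network reads:

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress

Quadlet turns this into a podman network with the given subnet, gateway, and label when the generated systemd unit starts.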
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:26:20 -0500 (0:00:00.073) 0:00:29.261 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:26:20 -0500 (0:00:00.067) 0:00:29.328 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.059) 0:00:29.387 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
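The quadlet name and type are derived from the source file name: "quadlet-demo.network" splits into name "quadlet-demo" and type "network", and the type selects the unit handling applied later. A plausible sketch of the derivation (the exact expressions are an assumption):

    - name: Set per-container variables part 2
      ansible.builtin.set_fact:
        __podman_quadlet_name: "{{ __podman_quadlet_file_src | basename | splitext | first }}"
        __podman_quadlet_type: "{{ (__podman_quadlet_file_src | splitext | last)[1:] }}"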
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.076) 0:00:29.464 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.070) 0:00:29.535 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.043) 0:00:29.579 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.052) 0:00:29.631 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.068) 0:00:29.699 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.449) 0:00:30.148 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.031) 0:00:30.179 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.036) 0:00:30.216 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:26:21 -0500 (0:00:00.043) 0:00:30.259 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.097) 0:00:30.356 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.071) 0:00:30.428 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.084) 0:00:30.513 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.079) 0:00:30.592 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.061) 0:00:30.654 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
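(Note how the role predicts the service name that the quadlet systemd generator will produce for each unit type. The mapping observed across this run is:

    quadlet-demo.network           -> quadlet-demo-network.service
    quadlet-demo-mysql.volume      -> quadlet-demo-mysql-volume.service
    quadlet-demo-mysql.container   -> quadlet-demo-mysql.service

i.e. .network and .volume files get a "-network"/"-volume" suffix, while a .container file maps directly to "<name>.service".)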
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.073) 0:00:30.728 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.050) 0:00:30.778 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.081) 0:00:30.860 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.081) 0:00:30.941 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.040) 0:00:30.981 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.040) 0:00:31.022 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.086) 0:00:31.108 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.060) 0:00:31.168 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.053) 0:00:31.222 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.053) 0:00:31.275 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Tuesday 03 December 2024 18:26:22 -0500 (0:00:00.054) 0:00:31.330 ******
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Tuesday 03 December 2024 18:26:23 -0500 (0:00:00.059) 0:00:31.390 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Tuesday 03 December 2024 18:26:23 -0500 (0:00:00.057) 0:00:31.447 ******
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:23 -0500 (0:00:00.473) 0:00:31.920 ******
changed: [managed-node1] => {
"changed": true,
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"dest": "/etc/containers/systemd/quadlet-demo.network",
"gid": 0,
"group": "root",
"md5sum": "061f3cf318cbd8ab5794bb1173831fb8",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 74,
"src": "/root/.ansible/tmp/ansible-tmp-1733268383.622994-17960-97040239360129/.source.network",
"state": "file",
"uid": 0
}
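(The copy result reports the SHA-1 checksum of the deployed file. A minimal cross-check on the managed node, an illustrative command and not part of the role, would be:

    sha1sum /etc/containers/systemd/quadlet-demo.network
    # should print e57c08d49aff4bae8daab138d913aeddaa8682a0
)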
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Tuesday 03 December 2024 18:26:24 -0500 (0:00:00.939) 0:00:32.860 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Tuesday 03 December 2024 18:26:24 -0500 (0:00:00.060) 0:00:32.920 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Tuesday 03 December 2024 18:26:24 -0500 (0:00:00.046) 0:00:32.966 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
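(The daemon reload matters here: quadlet files under /etc/containers/systemd are translated into transient units by the quadlet generator at (re)load time, which is why the service started below has its FragmentPath under /run/systemd/generator rather than /etc/systemd/system. A manual equivalent, sketched for illustration:

    systemctl daemon-reload
    systemctl cat quadlet-demo-network.service   # generated unit lives under /run/systemd/generator
)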
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Tuesday 03 December 2024 18:26:25 -0500 (0:00:00.931) 0:00:33.898 ******
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-network.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "sysinit.target -.mount systemd-journald.socket basic.target system.slice network-online.target",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3405926400",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "-.mount sysinit.target system.slice",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.network",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
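(Buried in the status dump above, ExecStart shows the command the generator derived from the .network file; with Type=oneshot and RemainAfterExit=yes the unit runs it once and then remains active. Pulled out of the log for readability:

    /usr/bin/podman network create --ignore \
        --subnet 192.168.30.0/24 --gateway 192.168.30.1 \
        --label app=wordpress systemd-quadlet-demo

--ignore makes the start idempotent, and quadlet prefixes the podman network name with "systemd-" by default.)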
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.728) 0:00:34.627 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.050) 0:00:34.677 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
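(Decoded, this is the entire quadlet-demo-mysql.volume file; the 9-byte size reported by the copy task below matches "[Volume]" plus a trailing newline:

    [Volume]
)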
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.063) 0:00:34.741 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.047) 0:00:34.788 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.035) 0:00:34.823 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.055) 0:00:34.879 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.064) 0:00:34.943 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.039) 0:00:34.982 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.042) 0:00:35.025 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:26:26 -0500 (0:00:00.078) 0:00:35.103 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.509) 0:00:35.613 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.056) 0:00:35.669 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.055) 0:00:35.725 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.039) 0:00:35.764 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.037) 0:00:35.802 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.044) 0:00:35.846 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.041) 0:00:35.887 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.038) 0:00:35.925 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.037) 0:00:35.963 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.060) 0:00:36.024 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.037) 0:00:36.061 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.040) 0:00:36.101 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.077) 0:00:36.178 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.040) 0:00:36.219 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:26:27 -0500 (0:00:00.046) 0:00:36.266 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.094) 0:00:36.360 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.071) 0:00:36.432 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.041) 0:00:36.473 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.039) 0:00:36.512 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.035) 0:00:36.547 ******
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.030) 0:00:36.578 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.038) 0:00:36.616 ******
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:28 -0500 (0:00:00.449) 0:00:37.066 ******
changed: [managed-node1] => {
"changed": true,
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"gid": 0,
"group": "root",
"md5sum": "5ddd03a022aeb4502d9bc8ce436b4233",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 9,
"src": "/root/.ansible/tmp/ansible-tmp-1733268388.7680979-18202-148706570473563/.source.volume",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Tuesday 03 December 2024 18:26:29 -0500 (0:00:00.828) 0:00:37.895 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Tuesday 03 December 2024 18:26:29 -0500 (0:00:00.059) 0:00:37.954 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Tuesday 03 December 2024 18:26:29 -0500 (0:00:00.056) 0:00:38.010 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Tuesday 03 December 2024 18:26:30 -0500 (0:00:00.947) 0:00:38.958 ******
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql-volume.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target sysinit.target -.mount system.slice network-online.target systemd-journald.socket",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3428757504",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "system.slice -.mount sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
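(At this point both the network and the volume units have run. An illustrative way to confirm the resulting podman objects on the managed node, assumed commands not emitted by the role:

    podman network inspect systemd-quadlet-demo
    podman volume inspect systemd-quadlet-demo-mysql
)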
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.735) 0:00:39.693 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.052) 0:00:39.746 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
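(This time __podman_quadlet_file_src is empty and __podman_quadlet_template_src is set, so the file comes from a Jinja2 template rather than a static copy. Decoded, the rendered quadlet-demo-mysql.container unit is:

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill

Note how Volume= and Network= reference the .volume and .network quadlet units deployed above, so quadlet resolves them to the systemd-prefixed podman objects and wires in the matching unit dependencies.)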
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.100) 0:00:39.847 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.041) 0:00:39.889 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.034) 0:00:39.923 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.049) 0:00:39.973 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.145) 0:00:40.118 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:26:31 -0500 (0:00:00.132) 0:00:40.250 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:26:32 -0500 (0:00:00.100) 0:00:40.351 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:26:32 -0500 (0:00:00.100) 0:00:40.451 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:26:32 -0500 (0:00:00.605) 0:00:41.056 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:26:32 -0500 (0:00:00.126) 0:00:41.183 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:26:32 -0500 (0:00:00.078) 0:00:41.261 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:26:32 -0500 (0:00:00.055) 0:00:41.317 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.054) 0:00:41.372 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.066) 0:00:41.438 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.100) 0:00:41.538 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.065) 0:00:41.604 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.048) 0:00:41.652 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.072) 0:00:41.724 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.039) 0:00:41.764 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.033) 0:00:41.797 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.080) 0:00:41.878 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.052) 0:00:41.930 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.050) 0:00:41.980 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.090) 0:00:42.071 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.073) 0:00:42.144 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.038) 0:00:42.183 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.034) 0:00:42.218 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
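All three linger tasks are skipped because __podman_rootless is false: these quadlets run as root, so no user session needs to outlive logins. For a rootless user the role would enable lingering so the user's systemd instance keeps running; a hedged sketch of the usual mechanism (the creates: guard uses systemd's linger flag directory and is illustrative, not the role's exact task):

    - name: Enable linger if needed
      ansible.builtin.command:
        cmd: loginctl enable-linger {{ __podman_user }}
        # systemd records lingering users as flag files here,
        # which makes the task idempotent
        creates: /var/lib/systemd/linger/{{ __podman_user }}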
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Tuesday 03 December 2024 18:26:33 -0500 (0:00:00.037) 0:00:42.255 ******
changed: [managed-node1] => (item=/tmp/quadlet_demo) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/quadlet_demo",
"mode": "0777",
"owner": "root",
"path": "/tmp/quadlet_demo",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 40,
"state": "directory",
"uid": 0
}
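The single item /tmp/quadlet_demo comes from __podman_volumes, set in "part 5" above from the quadlet spec's host volume mounts. Roughly equivalent as a standalone task (a sketch; the role also applies owner/group/mode from the spec, which is where the 0777 mode in the result comes from):

    - name: Create host directories
      ansible.builtin.file:
        path: "{{ item }}"
        state: directory
      loop: "{{ __podman_volumes }}"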
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Tuesday 03 December 2024 18:26:34 -0500 (0:00:00.495) 0:00:42.750 ******
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
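The pull output is censored because the task runs with no_log: true (it can carry registry credentials), and "attempts": 1 shows it succeeded on the first of several allowed retries. A minimal equivalent pull, assuming the containers.podman collection is what does the work here (retry and delay values are illustrative):

    - name: Ensure container images are present
      containers.podman.podman_image:
        name: quay.io/linux-system-roles/mysql:5.6  # from __podman_images above
      register: __podman_image_pull
      until: __podman_image_pull is success
      retries: 3
      delay: 5
      no_log: true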
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Tuesday 03 December 2024 18:26:42 -0500 (0:00:08.270) 0:00:51.021 ******
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:43 -0500 (0:00:00.452) 0:00:51.473 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Tuesday 03 December 2024 18:26:43 -0500 (0:00:00.035) 0:00:51.509 ******
changed: [managed-node1] => {
"changed": true,
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.container",
"gid": 0,
"group": "root",
"md5sum": "341b473056d2a5dfa35970b0d2e23a5d",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 363,
"src": "/root/.ansible/tmp/ansible-tmp-1733268403.2128954-18788-105550152898841/.source.container",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Tuesday 03 December 2024 18:26:43 -0500 (0:00:00.802) 0:00:52.311 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Tuesday 03 December 2024 18:26:43 -0500 (0:00:00.032) 0:00:52.344 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
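"name": null with an empty status map is the signature of a daemon-reload-only call to the systemd module: no unit is named, systemd is simply told to re-run its generators so the new .container file is translated into a .service. Equivalent task:

    - name: Reload systemctl
      ansible.builtin.systemd:
        daemon_reload: true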
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Tuesday 03 December 2024 18:26:44 -0500 (0:00:00.887) 0:00:53.231 ******
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "quadlet-demo-network.service sysinit.target -.mount tmp.mount system.slice basic.target systemd-journald.socket network-online.target quadlet-demo-mysql-volume.service",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuset io memory pids",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3367038976",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "continue",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice -.mount tmp.mount quadlet-demo-mysql-volume.service quadlet-demo-network.service sysinit.target",
"RequiresMountsFor": "/tmp/quadlet_demo /run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
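The unit is generated, not hand-written: FragmentPath points under /run/systemd/generator, and SourcePath points back to /etc/containers/systemd/quadlet-demo-mysql.container. Reading the ExecStart line backwards gives a plausible reconstruction of that 363-byte quadlet file (an inference, not the file's verbatim contents; quadlet expands Network=quadlet-demo.network and the named Volume= reference into the systemd- prefixed objects seen on the podman command line, and adds --cidfile/--replace/--rm/--cgroups=split/--sdnotify=conmon itself):

    [Install]
    WantedBy=multi-user.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill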
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Tuesday 03 December 2024 18:26:45 -0500 (0:00:01.062) 0:00:54.294 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:26:45 -0500 (0:00:00.035) 0:00:54.329 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
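The __podman_quadlet_str above is a single JSON-escaped line whose indentation is not visible in the log. Unpacked (indentation reconstructed to standard YAML; content otherwise as logged), it is the Envoy proxy ConfigMap the role installs as envoy-proxy-configmap.yml:

    ---
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: envoy-proxy-config
    data:
      envoy.yaml: |
        admin:
          address:
            socket_address:
              address: 0.0.0.0
              port_value: 9901

        static_resources:
          listeners:
          - name: listener_0
            address:
              socket_address:
                address: 0.0.0.0
                port_value: 8080
            filter_chains:
            - filters:
              - name: envoy.filters.network.http_connection_manager
                typed_config:
                  "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
                  stat_prefix: ingress_http
                  codec_type: AUTO
                  route_config:
                    name: local_route
                    virtual_hosts:
                    - name: local_service
                      domains: ["*"]
                      routes:
                      - match:
                          prefix: "/"
                        route:
                          cluster: backend
                  http_filters:
                  - name: envoy.filters.http.router
                    typed_config:
                      "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router
              transport_socket:
                name: envoy.transport_sockets.tls
                typed_config:
                  "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext
                  common_tls_context:
                    tls_certificates:
                    - certificate_chain:
                        filename: /etc/envoy-certificates/certificate.pem
                      private_key:
                        filename: /etc/envoy-certificates/certificate.key
          clusters:
          - name: backend
            connect_timeout: 5s
            type: STATIC
            dns_refresh_rate: 1800s
            lb_policy: ROUND_ROBIN
            load_assignment:
              cluster_name: backend
              endpoints:
              - lb_endpoints:
                - endpoint:
                    address:
                      socket_address:
                        address: 127.0.0.1
                        port_value: 80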
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.053) 0:00:54.383 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.067) 0:00:54.450 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.057) 0:00:54.508 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.086) 0:00:54.594 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.111) 0:00:54.706 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.061) 0:00:54.767 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.062) 0:00:54.830 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:26:46 -0500 (0:00:00.072) 0:00:54.902 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.479) 0:00:55.381 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.060) 0:00:55.442 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.037) 0:00:55.479 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.037) 0:00:55.517 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.052) 0:00:55.569 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.054) 0:00:55.623 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.050) 0:00:55.673 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.045) 0:00:55.718 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.038) 0:00:55.757 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.066) 0:00:55.824 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.037) 0:00:55.861 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.035) 0:00:55.897 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.077) 0:00:55.974 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.046) 0:00:56.021 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.033) 0:00:56.055 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.087) 0:00:56.143 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.080) 0:00:56.223 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:47 -0500 (0:00:00.110) 0:00:56.334 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:48 -0500 (0:00:00.038) 0:00:56.372 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Tuesday 03 December 2024 18:26:48 -0500 (0:00:00.038) 0:00:56.411 ******
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Tuesday 03 December 2024 18:26:48 -0500 (0:00:00.035) 0:00:56.446 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Tuesday 03 December 2024 18:26:48 -0500 (0:00:00.033) 0:00:56.479 ******
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:48 -0500 (0:00:00.470) 0:00:56.950 ******
changed: [managed-node1] => {
"changed": true,
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"dest": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"gid": 0,
"group": "root",
"md5sum": "aec75d972c231aac004e1338934544cf",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 2102,
"src": "/root/.ansible/tmp/ansible-tmp-1733268408.653214-18942-158494762108738/.source.yml",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Tuesday 03 December 2024 18:26:49 -0500 (0:00:00.935) 0:00:57.885 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Tuesday 03 December 2024 18:26:49 -0500 (0:00:00.042) 0:00:57.928 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Tuesday 03 December 2024 18:26:49 -0500 (0:00:00.038) 0:00:57.966 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Tuesday 03 December 2024 18:26:50 -0500 (0:00:01.021) 0:00:58.988 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Tuesday 03 December 2024 18:26:50 -0500 (0:00:00.039) 0:00:59.027 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:26:50 -0500 (0:00:00.038) 0:00:59.066 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
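Here __podman_quadlet_str is the rendered output of the quadlet-demo.yml.j2 template, again flattened to one escaped line. Unpacked (indentation reconstructed; content as logged), it defines the PersistentVolumeClaim and the two-container WordPress/Envoy pod:

    ---
    apiVersion: v1
    kind: PersistentVolumeClaim
    metadata:
      name: wp-pv-claim
      labels:
        app: wordpress
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 20Gi
    ---
    apiVersion: v1
    kind: Pod
    metadata:
      name: quadlet-demo
    spec:
      containers:
      - name: wordpress
        image: quay.io/linux-system-roles/wordpress:4.8-apache
        env:
        - name: WORDPRESS_DB_HOST
          value: quadlet-demo-mysql
        - name: WORDPRESS_DB_PASSWORD
          valueFrom:
            secretKeyRef:
              name: mysql-root-password-kube
              key: password
        volumeMounts:
        - name: wordpress-persistent-storage
          mountPath: /var/www/html
        resources:
          requests:
            memory: "64Mi"
            cpu: "250m"
          limits:
            memory: "128Mi"
            cpu: "500m"
      - name: envoy
        image: quay.io/linux-system-roles/envoyproxy:v1.25.0
        volumeMounts:
        - name: config-volume
          mountPath: /etc/envoy
        - name: certificates
          mountPath: /etc/envoy-certificates
        env:
        - name: ENVOY_UID
          value: "0"
        resources:
          requests:
            memory: "64Mi"
            cpu: "250m"
          limits:
            memory: "128Mi"
            cpu: "500m"
      volumes:
      - name: config-volume
        configMap:
          name: envoy-proxy-config
      - name: certificates
        secret:
          secretName: envoy-certificates
      - name: wordpress-persistent-storage
        persistentVolumeClaim:
          claimName: wp-pv-claim
      - name: www # not used - for testing hostpath
        hostPath:
          path: /tmp/httpd3
      - name: create # not used - for testing hostpath
        hostPath:
          path: /tmp/httpd3-create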
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:26:50 -0500 (0:00:00.101) 0:00:59.167 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:26:50 -0500 (0:00:00.044) 0:00:59.212 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:50 -0500 (0:00:00.037) 0:00:59.249 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:26:50 -0500 (0:00:00.059) 0:00:59.309 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.158) 0:00:59.467 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.065) 0:00:59.532 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.063) 0:00:59.596 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.077) 0:00:59.674 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.468) 0:01:00.142 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.057) 0:01:00.199 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.064) 0:01:00.263 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:26:51 -0500 (0:00:00.057) 0:01:00.321 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.057) 0:01:00.378 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.052) 0:01:00.430 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.055) 0:01:00.485 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.076) 0:01:00.562 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.067) 0:01:00.629 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.103) 0:01:00.732 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.063) 0:01:00.796 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.060) 0:01:00.857 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.131) 0:01:00.988 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.045) 0:01:01.033 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.043) 0:01:01.076 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.084) 0:01:01.161 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.056) 0:01:01.218 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.033) 0:01:01.251 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.036) 0:01:01.288 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Tuesday 03 December 2024 18:26:52 -0500 (0:00:00.033) 0:01:01.322 ******
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Tuesday 03 December 2024 18:26:53 -0500 (0:00:00.031) 0:01:01.353 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Tuesday 03 December 2024 18:26:53 -0500 (0:00:00.032) 0:01:01.385 ******
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
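Note: for root (system-scope) deployments, quadlet units live under /etc/containers/systemd and are picked up by podman's systemd generator at daemon reload. A spot-check on the node might look like this (a sketch; the generator output path is taken from the FragmentPath reported later in this run):
    ls -l /etc/containers/systemd
    ls /run/systemd/generator | grep quadlet   # generated .service units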
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:53 -0500 (0:00:00.465) 0:01:01.851 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Tuesday 03 December 2024 18:26:53 -0500 (0:00:00.039) 0:01:01.891 ******
changed: [managed-node1] => {
"changed": true,
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"dest": "/etc/containers/systemd/quadlet-demo.yml",
"gid": 0,
"group": "root",
"md5sum": "fd890594adfc24339cb9cdc5e7b19a66",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 1605,
"src": "/root/.ansible/tmp/ansible-tmp-1733268413.6047704-19120-87040644995924/.source.yml",
"state": "file",
"uid": 0
}
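Note: the copy result's "checksum" field is the SHA-1 of the written file, so the deployed quadlet-demo.yml can be re-verified on the node (a sketch using the digests reported above):
    sha1sum /etc/containers/systemd/quadlet-demo.yml   # expect 998dccde0483b1654327a46ddd89cbaa47650370
    md5sum  /etc/containers/systemd/quadlet-demo.yml   # expect fd890594adfc24339cb9cdc5e7b19a66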
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Tuesday 03 December 2024 18:26:54 -0500 (0:00:01.076) 0:01:02.967 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Tuesday 03 December 2024 18:26:54 -0500 (0:00:00.060) 0:01:03.028 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
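Note: the "Reload systemctl" task performs a systemd daemon reload, which reruns the quadlet generator so newly copied files become addressable units. The manual equivalent, plus an optional generator dry run (a sketch; the generator path is an assumption and may vary by distribution):
    systemctl daemon-reload
    /usr/lib/systemd/system-generators/podman-system-generator -dryrun   # assumed path; prints what quadlet would generate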
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Tuesday 03 December 2024 18:26:56 -0500 (0:00:01.335) 0:01:04.363 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.038) 0:01:04.402 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.037) 0:01:04.439 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
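Note: unescaped, the __podman_quadlet_str above is the quadlet-demo.kube unit that is installed later in this run:
    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml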
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.050) 0:01:04.490 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.086) 0:01:04.576 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.057) 0:01:04.634 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.110) 0:01:04.744 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.130) 0:01:04.874 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.131) 0:01:05.006 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.063) 0:01:05.069 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:26:56 -0500 (0:00:00.109) 0:01:05.179 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
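Note: getsubids (from shadow-utils-subid) is only consulted for non-root users, which is why the next two checks are skipped. For a rootless user the equivalent lookups would be (a sketch; USER is a placeholder):
    getsubids USER       # subordinate UID ranges
    getsubids -g USER    # subordinate GID ranges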
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:26:57 -0500 (0:00:00.532) 0:01:05.711 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:26:57 -0500 (0:00:00.091) 0:01:05.803 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:26:57 -0500 (0:00:00.095) 0:01:05.899 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:26:57 -0500 (0:00:00.101) 0:01:06.001 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:26:57 -0500 (0:00:00.211) 0:01:06.212 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:26:57 -0500 (0:00:00.079) 0:01:06.292 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:26:58 -0500 (0:00:00.105) 0:01:06.397 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:26:58 -0500 (0:00:00.102) 0:01:06.500 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:26:58 -0500 (0:00:00.087) 0:01:06.588 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
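Note: for a kube quadlet named quadlet-demo, the generated unit is quadlet-demo.service (the __podman_service_name fact above), managed in the system scope. Once started it can be inspected with (sketch):
    systemctl status quadlet-demo.service --no-pager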
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:26:58 -0500 (0:00:00.132) 0:01:06.720 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:26:58 -0500 (0:00:00.132) 0:01:06.853 ******
ok: [managed-node1] => {
"changed": false,
"content": "LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK",
"encoding": "base64",
"source": "/etc/containers/systemd/quadlet-demo.yml"
}
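Note: slurp returns file contents base64-encoded; the payload above decodes to the Kubernetes YAML written at 18:26:53 (a wp-pv-claim PersistentVolumeClaim plus the quadlet-demo pod with wordpress and envoy containers). To decode it locally (a sketch; CONTENT stands for the base64 string above):
    printf '%s' "$CONTENT" | base64 -d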
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.723) 0:01:07.576 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/wordpress:4.8-apache",
"quay.io/linux-system-roles/envoyproxy:v1.25.0"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": [
"/tmp/httpd3",
"/tmp/httpd3-create"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.193) 0:01:07.770 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.115) 0:01:07.885 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.076) 0:01:07.962 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.175) 0:01:08.137 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.063) 0:01:08.201 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.038) 0:01:08.240 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.039) 0:01:08.280 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Tuesday 03 December 2024 18:26:59 -0500 (0:00:00.040) 0:01:08.320 ******
changed: [managed-node1] => (item=/tmp/httpd3) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 40,
"state": "directory",
"uid": 0
}
changed: [managed-node1] => (item=/tmp/httpd3-create) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3-create",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3-create",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 40,
"state": "directory",
"uid": 0
}
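Note: these two directories are the hostPath volumes declared in quadlet-demo.yml (the __podman_volumes fact from part 5); the role pre-creates them so kube play does not fail on a missing host path. The manual equivalent (sketch):
    mkdir -p /tmp/httpd3 /tmp/httpd3-create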
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Tuesday 03 December 2024 18:27:00 -0500 (0:00:00.896) 0:01:09.216 ******
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
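Note: the censored loop items here are the two images resolved in part 5; the ~23s elapsed time is the pulls. By hand this would be (a sketch using the image refs from this run):
    podman pull quay.io/linux-system-roles/wordpress:4.8-apache
    podman pull quay.io/linux-system-roles/envoyproxy:v1.25.0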
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Tuesday 03 December 2024 18:27:23 -0500 (0:00:22.878) 0:01:32.095 ******
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Tuesday 03 December 2024 18:27:24 -0500 (0:00:00.491) 0:01:32.587 ******
changed: [managed-node1] => {
"changed": true,
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"dest": "/etc/containers/systemd/quadlet-demo.kube",
"gid": 0,
"group": "root",
"md5sum": "da53c88f92b68b0487aa209f795b6bb3",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 456,
"src": "/root/.ansible/tmp/ansible-tmp-1733268444.3018425-20033-210175559590704/.source.kube",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Tuesday 03 December 2024 18:27:25 -0500 (0:00:00.821) 0:01:33.408 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Tuesday 03 December 2024 18:27:25 -0500 (0:00:00.035) 0:01:33.443 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
Tuesday 03 December 2024 18:27:25 -0500 (0:00:00.039) 0:01:33.483 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
Tuesday 03 December 2024 18:27:26 -0500 (0:00:00.892) 0:01:34.375 ******
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo.service",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "-.mount quadlet-demo-network.service basic.target network-online.target sysinit.target systemd-journald.socket quadlet-demo-mysql.service system.slice",
"AllowIsolate": "no",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "0",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2864549888",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "[not set]",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "[not set]",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice quadlet-demo-mysql.service sysinit.target quadlet-demo-network.service -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "infinity"
}
}
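Note: of the property dump above, the fields most useful when debugging a quadlet unit are SourcePath (the .kube file), FragmentPath (the generated unit), and ExecStart (the podman kube play command line). They can be queried directly (sketch):
    systemctl show quadlet-demo.service -p SourcePath -p FragmentPath -p ExecStart --no-pager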
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125
Tuesday 03 December 2024 18:27:27 -0500 (0:00:01.343) 0:01:35.718 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Tuesday 03 December 2024 18:27:27 -0500 (0:00:00.035) 0:01:35.754 ******
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Tuesday 03 December 2024 18:27:27 -0500 (0:00:00.029) 0:01:35.784 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Tuesday 03 December 2024 18:27:27 -0500 (0:00:00.034) 0:01:35.819 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [Check quadlet files] *****************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96
Tuesday 03 December 2024 18:27:27 -0500 (0:00:00.053) 0:01:35.872 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/containers/systemd"
],
"delta": "0:00:00.006186",
"end": "2024-12-03 18:27:27.959127",
"rc": 0,
"start": "2024-12-03 18:27:27.952941"
}
STDOUT:
total 32
drwxr-xr-x. 9 root root 4096 Dec 3 18:22 ../
-rw-r--r--. 1 root root 74 Dec 3 18:26 quadlet-demo.network
-rw-r--r--. 1 root root 9 Dec 3 18:26 quadlet-demo-mysql.volume
-rw-r--r--. 1 root root 363 Dec 3 18:26 quadlet-demo-mysql.container
-rw-r--r--. 1 root root 2102 Dec 3 18:26 envoy-proxy-configmap.yml
-rw-r--r--. 1 root root 1605 Dec 3 18:26 quadlet-demo.yml
-rw-r--r--. 1 root root 456 Dec 3 18:27 quadlet-demo.kube
drwxr-xr-x. 2 root root 4096 Dec 3 18:27 ./
TASK [Check containers] ********************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100
Tuesday 03 December 2024 18:27:28 -0500 (0:00:00.522) 0:01:36.395 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.079958",
"end": "2024-12-03 18:27:28.544315",
"failed_when_result": false,
"rc": 0,
"start": "2024-12-03 18:27:28.464357"
}
STDOUT:
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
88a75afea3b9 quay.io/libpod/registry:2.8.2 /etc/docker/regis... 5 minutes ago Up 5 minutes 127.0.0.1:5000->5000/tcp podman_registry
b40d93c63736 quay.io/linux-system-roles/mysql:5.6 mysqld 43 seconds ago Up 43 seconds (healthy) 3306/tcp quadlet-demo-mysql
7b2bc62adcd1 localhost/podman-pause:5.3.1-1732147200 1 second ago Up 2 seconds a96f3a51b8d1-service
f8b4ef2b7c11 localhost/podman-pause:5.3.1-1732147200 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp 4e6f710fa1e9-infra
5256bcfa939d quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress
e811bf230cbe quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy
TASK [Check volumes] ***********************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105
Tuesday 03 December 2024 18:27:28 -0500 (0:00:00.643) 0:01:37.039 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls"
],
"delta": "0:00:00.037321",
"end": "2024-12-03 18:27:29.132777",
"failed_when_result": false,
"rc": 0,
"start": "2024-12-03 18:27:29.095456"
}
STDOUT:
DRIVER VOLUME NAME
local 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [Check pods] **************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110
Tuesday 03 December 2024 18:27:29 -0500 (0:00:00.532) 0:01:37.571 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.043894",
"end": "2024-12-03 18:27:29.654960",
"failed_when_result": false,
"rc": 0,
"start": "2024-12-03 18:27:29.611066"
}
STDOUT:
POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS
4e6f710fa1e9 quadlet-demo Running 2 seconds ago f8b4ef2b7c11 f8b4ef2b7c11,5256bcfa939d,e811bf230cbe 4e6f710fa1e9-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115
Tuesday 03 December 2024 18:27:29 -0500 (0:00:00.525) 0:01:38.096 ******
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units | grep quadlet",
"delta": "0:00:00.015580",
"end": "2024-12-03 18:27:30.126844",
"failed_when_result": false,
"rc": 0,
"start": "2024-12-03 18:27:30.111264"
}
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
TASK [Check web] ***************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
Tuesday 03 December 2024 18:27:30 -0500 (0:00:00.486) 0:01:38.582 ******
changed: [managed-node1] => {
"attempts": 1,
"changed": true,
"checksum_dest": null,
"checksum_src": "d44182b9a94a268fbf8cab84925a2310d9769790",
"dest": "/run/out",
"elapsed": 0,
"gid": 0,
"group": "root",
"md5sum": "61339352cd041f799671e571f8a7a90c",
"mode": "0600",
"owner": "root",
"secontext": "system_u:object_r:var_run_t:s0",
"size": 11666,
"src": "/root/.ansible/tmp/ansible-tmp-1733268450.2883835-20215-87529649174348/tmpqo0524gv",
"state": "file",
"status_code": 200,
"uid": 0,
"url": "https://localhost:8000"
}
MSG:
OK (unknown bytes)
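Note: the Check web task fetches https://localhost:8000 (the envoy data port published by the .kube unit) and writes the body to /run/out; status_code 200 confirms the end-to-end wiring. A shell equivalent (a sketch; -k assumes the test uses a self-signed certificate and the task disables certificate validation):
    curl -ks https://localhost:8000 -o /run/out   # -k: skip TLS verification (assumption)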
TASK [Show web] ****************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:132
Tuesday 03 December 2024 18:27:31 -0500 (0:00:01.315) 0:01:39.898 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"cat",
"/run/out"
],
"delta": "0:00:00.004164",
"end": "2024-12-03 18:27:31.893939",
"rc": 0,
"start": "2024-12-03 18:27:31.889775"
}
STDOUT:
WordPress › Installation
WordPress
TASK [Error] *******************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:137
Tuesday 03 December 2024 18:27:31 -0500 (0:00:00.452) 0:01:40.350 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__web_status is failed",
"skip_reason": "Conditional result was False"
}
TASK [Check] *******************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
Tuesday 03 December 2024 18:27:32 -0500 (0:00:00.034) 0:01:40.385 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.043656",
"end": "2024-12-03 18:27:32.452497",
"rc": 0,
"start": "2024-12-03 18:27:32.408841"
}
STDOUT:
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
88a75afea3b9 quay.io/libpod/registry:2.8.2 /etc/docker/regis... 5 minutes ago Up 5 minutes 127.0.0.1:5000->5000/tcp podman_registry
b40d93c63736 quay.io/linux-system-roles/mysql:5.6 mysqld 46 seconds ago Up 47 seconds (healthy) 3306/tcp quadlet-demo-mysql
7b2bc62adcd1 localhost/podman-pause:5.3.1-1732147200 5 seconds ago Up 6 seconds a96f3a51b8d1-service
f8b4ef2b7c11 localhost/podman-pause:5.3.1-1732147200 5 seconds ago Up 5 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp 4e6f710fa1e9-infra
5256bcfa939d quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 5 seconds ago Up 5 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress
e811bf230cbe quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 5 seconds ago Up 5 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy
TASK [Check pods] **************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152
Tuesday 03 December 2024 18:27:32 -0500 (0:00:00.496) 0:01:40.882 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.042352",
"end": "2024-12-03 18:27:32.953755",
"failed_when_result": false,
"rc": 0,
"start": "2024-12-03 18:27:32.911403"
}
STDOUT:
POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS
4e6f710fa1e9 quadlet-demo Running 6 seconds ago f8b4ef2b7c11 f8b4ef2b7c11,5256bcfa939d,e811bf230cbe 4e6f710fa1e9-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157
Tuesday 03 December 2024 18:27:33 -0500 (0:00:00.521) 0:01:41.403 ******
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet",
"delta": "0:00:00.015560",
"end": "2024-12-03 18:27:33.425337",
"failed_when_result": false,
"rc": 0,
"start": "2024-12-03 18:27:33.409777"
}
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
TASK [LS] **********************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165
Tuesday 03 December 2024 18:27:33 -0500 (0:00:00.477) 0:01:41.880 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/systemd/system"
],
"delta": "0:00:00.005033",
"end": "2024-12-03 18:27:33.883814",
"failed_when_result": false,
"rc": 0,
"start": "2024-12-03 18:27:33.878781"
}
STDOUT:
total 56
drwxr-xr-x. 5 root root 4096 Dec 3 00:45 ../
drwxr-xr-x. 2 root root 4096 Dec 3 00:45 getty.target.wants/
lrwxrwxrwx. 1 root root 43 Dec 3 00:45 dbus.service -> /usr/lib/systemd/system/dbus-broker.service
lrwxrwxrwx. 1 root root 37 Dec 3 00:45 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
drwxr-xr-x. 2 root root 4096 Dec 3 00:45 systemd-journald.service.wants/
lrwxrwxrwx. 1 root root 45 Dec 3 00:45 dbus-org.freedesktop.home1.service -> /usr/lib/systemd/system/systemd-homed.service
drwxr-xr-x. 2 root root 4096 Dec 3 00:45 systemd-homed.service.wants/
lrwxrwxrwx. 1 root root 44 Dec 3 00:45 dbus-org.freedesktop.oom1.service -> /usr/lib/systemd/system/systemd-oomd.service
drwxr-xr-x. 2 root root 4096 Dec 3 00:46 graphical.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 3 00:46 network-online.target.wants/
lrwxrwxrwx. 1 root root 57 Dec 3 00:46 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
lrwxrwxrwx. 1 root root 41 Dec 3 00:46 dbus-org.bluez.service -> /usr/lib/systemd/system/bluetooth.service
drwxr-xr-x. 2 root root 4096 Dec 3 00:46 bluetooth.target.wants/
lrwxrwxrwx. 1 root root 48 Dec 3 00:46 dbus-org.freedesktop.resolve1.service -> /usr/lib/systemd/system/systemd-resolved.service
lrwxrwxrwx. 1 root root 41 Dec 3 00:50 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x. 2 root root 4096 Dec 3 02:56 remote-fs.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 3 02:57 timers.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 3 02:58 cloud-init.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 3 02:59 sockets.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 3 02:59 sysinit.target.wants/
drwxr-xr-x. 2 root root 4096 Dec 3 18:26 multi-user.target.wants/
lrwxrwxrwx. 1 root root 41 Dec 3 18:26 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 14 root root 4096 Dec 3 18:26 ./
TASK [Cleanup] *****************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172
Tuesday 03 December 2024 18:27:33 -0500 (0:00:00.456) 0:01:42.337 ******
included: fedora.linux_system_roles.podman for managed-node1
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.164) 0:01:42.501 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.070) 0:01:42.572 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.049) 0:01:42.622 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.043) 0:01:42.665 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.055) 0:01:42.721 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.059) 0:01:42.780 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.043) 0:01:42.824 ******
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
ok: [managed-node1] => (item=Fedora.yml) => {
"ansible_facts": {
"__podman_packages": [
"iptables-nft",
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/Fedora.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "Fedora.yml"
}
skipping: [managed-node1] => (item=Fedora_41.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "Fedora_41.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=Fedora_41.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "Fedora_41.yml",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Tuesday 03 December 2024 18:27:34 -0500 (0:00:00.090) 0:01:42.915 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Tuesday 03 December 2024 18:27:35 -0500 (0:00:01.080) 0:01:43.995 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Tuesday 03 December 2024 18:27:35 -0500 (0:00:00.057) 0:01:44.052 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages))",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Tuesday 03 December 2024 18:27:35 -0500 (0:00:00.063) 0:01:44.116 ******
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Tuesday 03 December 2024 18:27:35 -0500 (0:00:00.068) 0:01:44.184 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Tuesday 03 December 2024 18:27:35 -0500 (0:00:00.041) 0:01:44.226 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Tuesday 03 December 2024 18:27:35 -0500 (0:00:00.044) 0:01:44.271 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.029461",
"end": "2024-12-03 18:27:36.310824",
"rc": 0,
"start": "2024-12-03 18:27:36.281363"
}
STDOUT:
podman version 5.3.1
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.473) 0:01:44.744 ******
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "5.3.1"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.097) 0:01:44.841 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.047) 0:01:44.889 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.047) 0:01:44.936 ******
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.046) 0:01:44.983 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.055) 0:01:45.038 ******
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.062) 0:01:45.101 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.088) 0:01:45.189 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.053) 0:01:45.242 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:27:36 -0500 (0:00:00.053) 0:01:45.296 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
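`__podman_group: "0"` comes from the getent passwd entry for the user (GID 0 for root). A sketch of the lookup, assuming `ansible.builtin.getent` populates `ansible_facts['getent_passwd']` with the passwd fields as a list:
- name: Get user information
  ansible.builtin.getent:
    database: passwd
    key: "{{ __podman_user }}"
  when: "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']"

- name: Set group for podman user
  ansible.builtin.set_fact:
    # getent_passwd value fields: password, uid, gid, gecos, home, shell
    __podman_group: "{{ ansible_facts['getent_passwd'][__podman_user][2] }}"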
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.059) 0:01:45.356 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.440) 0:01:45.796 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.038) 0:01:45.834 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.040) 0:01:45.875 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
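All of the subuid/subgid checks above skip because this play runs as root; they only matter for rootless users, where the role verifies subordinate ID ranges via `getsubids` (stat'd at /usr/bin/getsubids above). A sketch of the check; the register name is illustrative:
- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids {{ __podman_user }}
  register: __podman_register_subuids   # illustrative name
  changed_when: false
  when: __podman_user not in ["root", "0"]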
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.037) 0:01:45.913 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.038) 0:01:45.951 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.037) 0:01:45.989 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.057) 0:01:46.046 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.055) 0:01:46.102 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.054) 0:01:46.156 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.068) 0:01:46.225 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Tuesday 03 December 2024 18:27:37 -0500 (0:00:00.109) 0:01:46.334 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.058) 0:01:46.393 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.056) 0:01:46.449 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.110) 0:01:46.560 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.071) 0:01:46.632 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.073) 0:01:46.706 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.107) 0:01:46.813 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.052) 0:01:46.866 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.059) 0:01:46.926 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.114) 0:01:47.040 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.051) 0:01:47.092 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.039) 0:01:47.132 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.041) 0:01:47.174 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
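Every config-handling task above skips for the same reason: the corresponding role variable is empty, so there is nothing to render into the paths set in "Set config file paths". A sketch of caller-side values that would activate those tasks; the keys and values are illustrative assumptions, not from this run:
# Illustrative play vars; any non-empty value enables the matching
# handle_*.yml tasks above.
podman_containers_conf:
  containers:
    log_size_max: 10485760
podman_registries_conf:
  registry:
    - location: quay.io
podman_storage_conf:
  storage:
    runroot: /run/containers/storage
podman_policy_json:
  default:
    - type: insecureAcceptAnything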
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139
Tuesday 03 December 2024 18:27:38 -0500 (0:00:00.040) 0:01:47.215 ******
included: fedora.linux_system_roles.firewall for managed-node1
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Tuesday 03 December 2024 18:27:39 -0500 (0:00:00.142) 0:01:47.357 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Tuesday 03 December 2024 18:27:39 -0500 (0:00:00.103) 0:01:47.460 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Tuesday 03 December 2024 18:27:39 -0500 (0:00:00.069) 0:01:47.530 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Tuesday 03 December 2024 18:27:39 -0500 (0:00:00.059) 0:01:47.589 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Tuesday 03 December 2024 18:27:39 -0500 (0:00:00.057) 0:01:47.647 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Tuesday 03 December 2024 18:27:39 -0500 (0:00:00.078) 0:01:47.726 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Tuesday 03 December 2024 18:27:39 -0500 (0:00:00.079) 0:01:47.805 ******
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
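"Nothing to do" from the package manager means firewalld was already present; the task is idempotent and reports `ok` rather than `changed`. A minimal sketch, assuming `ansible.builtin.package`:
- name: Install firewalld
  ansible.builtin.package:
    name: firewalld
    state: present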
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Tuesday 03 December 2024 18:27:40 -0500 (0:00:01.504) 0:01:49.310 ******
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Tuesday 03 December 2024 18:27:41 -0500 (0:00:00.081) 0:01:49.391 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Tuesday 03 December 2024 18:27:41 -0500 (0:00:00.110) 0:01:49.502 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Tuesday 03 December 2024 18:27:41 -0500 (0:00:00.110) 0:01:49.613 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Tuesday 03 December 2024 18:27:41 -0500 (0:00:00.086) 0:01:49.699 ******
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Tuesday 03 December 2024 18:27:41 -0500 (0:00:00.070) 0:01:49.770 ******
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestamp": "Tue 2024-12-03 18:26:14 EST",
"ActiveEnterTimestampMonotonic": "708564442",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "system.slice dbus.socket polkit.service dbus-broker.service sysinit.target basic.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Tue 2024-12-03 18:26:13 EST",
"AssertTimestampMonotonic": "708154385",
"Before": "network-pre.target shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "482881000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Tue 2024-12-03 18:26:13 EST",
"ConditionTimestampMonotonic": "708154381",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ip6tables.service ebtables.service shutdown.target iptables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlGroupId": "136745",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestamp": "Tue 2024-12-03 18:26:13 EST",
"ExecMainHandoffTimestampMonotonic": "708202934",
"ExecMainPID": "66423",
"ExecMainStartTimestamp": "Tue 2024-12-03 18:26:13 EST",
"ExecMainStartTimestampMonotonic": "708157464",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Tue 2024-12-03 18:26:13 EST",
"InactiveExitTimestampMonotonic": "708158045",
"InvocationID": "df836847586848d2aecfbc655a5985dc",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "66423",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2841718784",
"MemoryCurrent": "32636928",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "32903168",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket sysinit.target system.slice",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Tue 2024-12-03 18:27:25 EST",
"StateChangeTimestampMonotonic": "780364412",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
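The large dictionary above is simply the full systemd unit state the service module echoes back; the task itself only needs to clear any mask on the unit. A minimal sketch, assuming `ansible.builtin.systemd_service`:
- name: Unmask firewalld service
  ansible.builtin.systemd_service:
    name: firewalld
    masked: false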
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Tuesday 03 December 2024 18:27:42 -0500 (0:00:00.667) 0:01:50.437 ******
ok: [managed-node1] => {
"changed": false,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
"ActiveEnterTimestamp": "Tue 2024-12-03 18:26:14 EST",
"ActiveEnterTimestampMonotonic": "708564442",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "system.slice dbus.socket polkit.service dbus-broker.service sysinit.target basic.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Tue 2024-12-03 18:26:13 EST",
"AssertTimestampMonotonic": "708154385",
"Before": "network-pre.target shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "482881000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Tue 2024-12-03 18:26:13 EST",
"ConditionTimestampMonotonic": "708154381",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ip6tables.service ebtables.service shutdown.target iptables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlGroupId": "136745",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DeviceAllow": "char-rtc r",
"DevicePolicy": "closed",
"Documentation": "\"man:firewalld(1)\"",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestamp": "Tue 2024-12-03 18:26:13 EST",
"ExecMainHandoffTimestampMonotonic": "708202934",
"ExecMainPID": "66423",
"ExecMainStartTimestamp": "Tue 2024-12-03 18:26:13 EST",
"ExecMainStartTimestampMonotonic": "708157464",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Tue 2024-12-03 18:26:13 EST",
"InactiveExitTimestampMonotonic": "708158045",
"InvocationID": "df836847586848d2aecfbc655a5985dc",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "yes",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "66423",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2841489408",
"MemoryCurrent": "32636928",
"MemoryDenyWriteExecute": "yes",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "32903168",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "yes",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "yes",
"ProtectControlGroups": "yes",
"ProtectHome": "yes",
"ProtectHostname": "yes",
"ProtectKernelLogs": "yes",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "yes",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket sysinit.target system.slice",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "yes",
"RestrictSUIDSGID": "yes",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Tue 2024-12-03 18:27:25 EST",
"StateChangeTimestampMonotonic": "780364412",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallArchitectures": "native",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
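Here `"enabled": true` and `"state": "started"` in the result map directly onto the module arguments, and `"changed": false` confirms the unit was already enabled and running. A minimal sketch:
- name: Enable and start firewalld service
  ansible.builtin.systemd_service:
    name: firewalld
    enabled: true
    state: started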
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Tuesday 03 December 2024 18:27:42 -0500 (0:00:00.678) 0:01:51.116 ******
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/bin/python3",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Tuesday 03 December 2024 18:27:42 -0500 (0:00:00.076) 0:01:51.193 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Tuesday 03 December 2024 18:27:42 -0500 (0:00:00.055) 0:01:51.248 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Tuesday 03 December 2024 18:27:42 -0500 (0:00:00.070) 0:01:51.319 ******
ok: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
ok: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
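The two loop items show exactly what this run passed to the firewall role, and `__firewall_changed: false` means both ports were already open. The equivalent caller-side invocation, reconstructed from the items above:
- name: Manage firewall for specified ports
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 8000/tcp
        state: enabled
      - port: 9000/tcp
        state: enabled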
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Tuesday 03 December 2024 18:27:44 -0500 (0:00:01.269) 0:01:52.588 ******
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.217) 0:01:52.805 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.070) 0:01:52.876 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.065) 0:01:52.941 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.078) 0:01:53.020 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.076) 0:01:53.097 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.057) 0:01:53.154 ******
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.099) 0:01:53.254 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153
Tuesday 03 December 2024 18:27:44 -0500 (0:00:00.060) 0:01:53.314 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.061) 0:01:53.375 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.053) 0:01:53.428 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.053) 0:01:53.482 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.245) 0:01:53.728 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.060) 0:01:53.788 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.083) 0:01:53.872 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.129) 0:01:54.002 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.057) 0:01:54.060 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.063) 0:01:54.124 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
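Linger management is likewise rootless-only: for a non-root `__podman_user` the role would enable systemd lingering so the user's units outlive their login session. A sketch of the pattern, assuming `loginctl` and using `creates` for idempotence (the exact task wording is an assumption):
- name: Enable linger if needed
  ansible.builtin.command: loginctl enable-linger {{ __podman_user }}
  args:
    # loginctl records lingering users under this path
    creates: /var/lib/systemd/linger/{{ __podman_user }}
  when: __podman_rootless | bool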
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.066) 0:01:54.190 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Tuesday 03 December 2024 18:27:45 -0500 (0:00:00.051) 0:01:54.242 ******
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
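The secret content is hidden by `no_log`, but the shape of the task is a standard `containers.podman.podman_secret` call; the name and data below are hypothetical placeholders, not the values from this run:
- name: Manage each secret
  containers.podman.podman_secret:
    name: example-secret                  # hypothetical
    data: "{{ example_secret_value }}"    # hypothetical vaulted variable
    state: present
  no_log: true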
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Tuesday 03 December 2024 18:27:46 -0500 (0:00:00.587) 0:01:54.832 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Tuesday 03 December 2024 18:27:46 -0500 (0:00:00.086) 0:01:54.918 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Tuesday 03 December 2024 18:27:46 -0500 (0:00:00.102) 0:01:55.020 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:27:46 -0500 (0:00:00.124) 0:01:55.145 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:27:46 -0500 (0:00:00.073) 0:01:55.218 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:27:46 -0500 (0:00:00.068) 0:01:55.286 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Tuesday 03 December 2024 18:27:47 -0500 (0:00:00.063) 0:01:55.350 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Tuesday 03 December 2024 18:27:47 -0500 (0:00:00.053) 0:01:55.404 ******
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Tuesday 03 December 2024 18:27:47 -0500 (0:00:00.557) 0:01:55.961 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Tuesday 03 December 2024 18:27:47 -0500 (0:00:00.085) 0:01:56.047 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13
Tuesday 03 December 2024 18:27:47 -0500 (0:00:00.113) 0:01:56.160 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:27:47 -0500 (0:00:00.115) 0:01:56.275 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:27:48 -0500 (0:00:00.096) 0:01:56.372 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:27:48 -0500 (0:00:00.054) 0:01:56.427 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18
Tuesday 03 December 2024 18:27:48 -0500 (0:00:00.055) 0:01:56.482 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34
Tuesday 03 December 2024 18:27:48 -0500 (0:00:00.068) 0:01:56.551 ******
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182
Tuesday 03 December 2024 18:27:48 -0500 (0:00:00.603) 0:01:57.154 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189
Tuesday 03 December 2024 18:27:48 -0500 (0:00:00.124) 0:01:57.279 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.430) 0:01:57.709 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
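For readability, the escaped __podman_quadlet_str value above decodes to the following quadlet-demo.kube unit file:
    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml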
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.106) 0:01:57.815 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.088) 0:01:57.904 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.070) 0:01:57.974 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.072) 0:01:58.046 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.074) 0:01:58.121 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.050) 0:01:58.171 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
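The two skips above show that user information for "root" was already cached in ansible_facts by an earlier lookup, so no new query was needed. A hedged sketch of how such a lookup is typically done, with the when condition mirroring the false_condition printed above:
    - name: Get user information (illustrative sketch)
      ansible.builtin.getent:
        database: passwd
        key: "{{ __podman_user }}"
      when: >-
        'getent_passwd' not in ansible_facts
        or __podman_user not in ansible_facts['getent_passwd']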
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.070) 0:01:58.242 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:27:49 -0500 (0:00:00.077) 0:01:58.319 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:27:50 -0500 (0:00:00.492) 0:01:58.812 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:27:50 -0500 (0:00:00.102) 0:01:58.914 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:27:50 -0500 (0:00:00.112) 0:01:59.027 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:27:50 -0500 (0:00:00.078) 0:01:59.106 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:27:50 -0500 (0:00:00.074) 0:01:59.180 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:27:50 -0500 (0:00:00.066) 0:01:59.247 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:27:50 -0500 (0:00:00.060) 0:01:59.308 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.055) 0:01:59.363 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
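Every subuid/subgid task above is skipped because __podman_user is root: rootless namespace ranges are only required for non-root users, and /usr/bin/getsubids exists (per the stat result), so the file-based fallbacks are skipped as well. For a non-root user, the check would run roughly as follows (a sketch; the exact task body is inferred from the task names and false_condition values shown above):
    - name: Check with getsubids for user subuids (illustrative sketch)
      ansible.builtin.command: getsubids {{ __podman_user }}
      changed_when: false
      when: __podman_user not in ["root", "0"]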
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.054) 0:01:59.418 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.118) 0:01:59.536 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.200) 0:01:59.737 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.041) 0:01:59.778 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": []
},
"changed": false
}
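Parts 4 and 5 resolve where the quadlet unit lives: for root, the quadlet generator reads /etc/containers/systemd, giving /etc/containers/systemd/quadlet-demo.kube. Deploying such a file looks roughly like this sketch (the local src path is hypothetical; mode 0644 matches the stat result later in this run):
    - name: Install a quadlet file (illustrative sketch)
      ansible.builtin.copy:
        src: quadlet-demo.kube   # hypothetical controller-side source
        dest: /etc/containers/systemd/quadlet-demo.kube
        mode: "0644"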
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.090) 0:01:59.868 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.046) 0:01:59.915 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.103) 0:02:00.018 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Tuesday 03 December 2024 18:27:51 -0500 (0:00:00.050) 0:02:00.069 ******
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Tue 2024-12-03 18:27:27 EST",
"ActiveEnterTimestampMonotonic": "781643277",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "-.mount quadlet-demo-network.service basic.target network-online.target sysinit.target systemd-journald.socket quadlet-demo-mysql.service system.slice",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Tue 2024-12-03 18:27:26 EST",
"AssertTimestampMonotonic": "780971405",
"Before": "multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "297600000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Tue 2024-12-03 18:27:26 EST",
"ConditionTimestampMonotonic": "780971401",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo.service",
"ControlGroupId": "138672",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "4421",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "74433",
"ExecMainStartTimestamp": "Tue 2024-12-03 18:27:27 EST",
"ExecMainStartTimestampMonotonic": "781642938",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Tue 2024-12-03 18:27:26 EST] ; stop_time=[n/a] ; pid=74422 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Tue 2024-12-03 18:27:26 EST] ; stop_time=[n/a] ; pid=74422 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Tue 2024-12-03 18:27:26 EST",
"InactiveExitTimestampMonotonic": "780973805",
"InvocationID": "51b9926fea8e44319e461d098e78f658",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "74433",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2862669824",
"MemoryCurrent": "3182592",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "23248896",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice quadlet-demo-mysql.service sysinit.target quadlet-demo-network.service -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Tue 2024-12-03 18:27:27 EST",
"StateChangeTimestampMonotonic": "781643277",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "4",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
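The large property dump above is what systemd returns while the role stops and disables quadlet-demo.service. Functionally the task reduces to the sketch below; "failed_when_result": false in the output indicates the role guards the task with a failed_when so that a missing unit does not abort cleanup (shown here simplified, and the register name is hypothetical):
    - name: Stop and disable service (illustrative sketch)
      ansible.builtin.systemd:
        name: quadlet-demo.service
        state: stopped
        enabled: false
      register: __stop_result   # hypothetical name
      failed_when: false        # simplification of the role's guard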
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Tuesday 03 December 2024 18:27:53 -0500 (0:00:01.740) 0:02:01.809 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268444.973733,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"ctime": 1733268444.980733,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 411018,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1733268444.662731,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 456,
"uid": 0,
"version": "1291623594",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Tuesday 03 December 2024 18:27:54 -0500 (0:00:00.559) 0:02:02.369 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Tuesday 03 December 2024 18:27:54 -0500 (0:00:00.132) 0:02:02.501 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Tuesday 03 December 2024 18:27:54 -0500 (0:00:00.518) 0:02:03.020 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Tuesday 03 December 2024 18:27:54 -0500 (0:00:00.062) 0:02:03.082 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Tuesday 03 December 2024 18:27:54 -0500 (0:00:00.049) 0:02:03.132 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Tuesday 03 December 2024 18:27:54 -0500 (0:00:00.048) 0:02:03.181 ******
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"state": "absent"
}
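Removing the unit file itself is a plain file deletion; the result above maps directly onto a task like this sketch:
    - name: Remove quadlet file (illustrative sketch)
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo.kube
        state: absent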
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Tuesday 03 December 2024 18:27:55 -0500 (0:00:00.505) 0:02:03.686 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
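The empty name and status in the result above are consistent with a pure daemon-reload call, which regenerates the transient units after the .kube file was removed. A minimal sketch:
    - name: Refresh systemd (illustrative sketch)
      ansible.builtin.systemd:
        daemon_reload: true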
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Tuesday 03 December 2024 18:27:56 -0500 (0:00:01.029) 0:02:04.716 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Tuesday 03 December 2024 18:27:56 -0500 (0:00:00.070) 0:02:04.787 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Tuesday 03 December 2024 18:27:56 -0500 (0:00:00.112) 0:02:04.899 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Tuesday 03 December 2024 18:27:56 -0500 (0:00:00.123) 0:02:05.023 ******
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.658131",
"end": "2024-12-03 18:27:57.772996",
"rc": 0,
"start": "2024-12-03 18:27:57.114865"
}
STDOUT:
9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
a732580249591e3a0508675ded1a8612d06413d44d4fe392e36e9e4641cf6824
fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d
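The four IDs printed above are the images removed by the prune. Since the command is shown verbatim in the result, the task is roughly the sketch below; the changed_when expression is one plausible way to report "changed" only when something was actually pruned (the register name is hypothetical):
    - name: Prune images no longer in use (illustrative sketch)
      ansible.builtin.command: podman image prune --all -f
      register: __prune_result                     # hypothetical name
      changed_when: __prune_result.stdout != ""    # assumed heuristic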
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Tuesday 03 December 2024 18:27:57 -0500 (0:00:01.231) 0:02:06.255 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:27:58 -0500 (0:00:00.131) 0:02:06.386 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:27:58 -0500 (0:00:00.067) 0:02:06.454 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:27:58 -0500 (0:00:00.062) 0:02:06.517 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Tuesday 03 December 2024 18:27:58 -0500 (0:00:00.060) 0:02:06.577 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.039692",
"end": "2024-12-03 18:27:58.658694",
"rc": 0,
"start": "2024-12-03 18:27:58.619002"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 16 months ago 24.6 MB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 2 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Tuesday 03 December 2024 18:27:58 -0500 (0:00:00.587) 0:02:07.165 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.035037",
"end": "2024-12-03 18:27:59.306383",
"rc": 0,
"start": "2024-12-03 18:27:59.271346"
}
STDOUT:
local 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Tuesday 03 December 2024 18:27:59 -0500 (0:00:00.640) 0:02:07.806 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.039148",
"end": "2024-12-03 18:27:59.919979",
"rc": 0,
"start": "2024-12-03 18:27:59.880831"
}
STDOUT:
88a75afea3b9 quay.io/libpod/registry:2.8.2 /etc/docker/regis... 6 minutes ago Up 6 minutes 127.0.0.1:5000->5000/tcp podman_registry
b40d93c63736 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Tuesday 03 December 2024 18:28:00 -0500 (0:00:00.571) 0:02:08.377 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.034495",
"end": "2024-12-03 18:28:00.427156",
"rc": 0,
"start": "2024-12-03 18:28:00.392661"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
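The four "For testing and debugging" tasks above snapshot what survived cleanup: two images, five volumes, two running containers, and three networks. Collapsed into a single loop, they would look roughly like this sketch (the role runs them as separate tasks, as the task paths show):
    - name: Snapshot remaining podman resources (illustrative sketch)
      ansible.builtin.command: "podman {{ item }}"
      loop:
        - images -n
        - volume ls -n
        - ps --noheading
        - network ls -n -q
      changed_when: false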
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Tuesday 03 December 2024 18:28:00 -0500 (0:00:00.513) 0:02:08.891 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Tuesday 03 December 2024 18:28:01 -0500 (0:00:00.550) 0:02:09.444 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
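The per-unit services table that follows is the kind of inventory ansible.builtin.service_facts returns; a hedged sketch of the gathering task (the role's exact task body is not shown in this log):
    - name: For testing and debugging - services (illustrative sketch)
      ansible.builtin.service_facts: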
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Tuesday 03 December 2024 18:28:01 -0500 (0:00:00.560) 0:02:10.005 ******
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"b40d93c63736d6095b773c67d3be97d4fe5a01b281121a6ad6dccf433a33b81a-5d0d6e2109b39dcb.service": {
"name": "b40d93c63736d6095b773c67d3be97d4fe5a01b281121a6ad6dccf433a33b81a-5d0d6e2109b39dcb.service",
"source": "systemd",
"state": "stopped",
"status": "failed"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"bluetooth.service": {
"name": "bluetooth.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.bluez.service": {
"name": "dbus-org.bluez.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.home1.service": {
"name": "dbus-org.freedesktop.home1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.oom1.service": {
"name": "dbus-org.freedesktop.oom1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.resolve1.service": {
"name": "dbus-org.freedesktop.resolve1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf5-makecache.service": {
"name": "dnf5-makecache.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction-cleanup.service": {
"name": "dnf5-offline-transaction-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction.service": {
"name": "dnf5-offline-transaction.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fwupd-offline-update.service": {
"name": "fwupd-offline-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd-refresh.service": {
"name": "fwupd-refresh.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd.service": {
"name": "fwupd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"mdadm-grow-continue@.service": {
"name": "mdadm-grow-continue@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdadm-last-resort@.service": {
"name": "mdadm-last-resort@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdcheck_continue.service": {
"name": "mdcheck_continue.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdcheck_start.service": {
"name": "mdcheck_start.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmon@.service": {
"name": "mdmon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdmonitor-oneshot.service": {
"name": "mdmonitor-oneshot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmonitor.service": {
"name": "mdmonitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"raid-check.service": {
"name": "raid-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-bsod.service": {
"name": "systemd-bsod.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-homed-activate.service": {
"name": "systemd-homed-activate.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-homed-firstboot.service": {
"name": "systemd-homed-firstboot.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-homed.service": {
"name": "systemd-homed.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-mountfsd.service": {
"name": "systemd-mountfsd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-persistent-storage.service": {
"name": "systemd-networkd-persistent-storage.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-networkd-wait-online@.service": {
"name": "systemd-networkd-wait-online@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"systemd-networkd.service": {
"name": "systemd-networkd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-nsresourced.service": {
"name": "systemd-nsresourced.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-storagetm.service": {
"name": "systemd-storagetm.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-time-wait-sync.service": {
"name": "systemd-time-wait-sync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-userdbd.service": {
"name": "systemd-userdbd.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-zram-setup@.service": {
"name": "systemd-zram-setup@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-zram-setup@zram0.service": {
"name": "systemd-zram-setup@zram0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"udisks2.service": {
"name": "udisks2.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:05 -0500 (0:00:03.560) 0:02:13.565 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.058) 0:02:13.624 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.146) 0:02:13.770 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.047) 0:02:13.818 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.039) 0:02:13.857 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.059) 0:02:13.917 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.108) 0:02:14.026 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.077) 0:02:14.104 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.103) 0:02:14.207 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
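The role derives __podman_group from the gid field of the getent_passwd fact for __podman_user; for root that is GID 0. A minimal manual equivalent on the managed node (GECOS and shell fields vary by system):

    getent passwd root
    # root:x:0:0:root:/root:/bin/bash   <- fourth field (0) is the group set above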
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:28:05 -0500 (0:00:00.104) 0:02:14.312 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
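The stat confirms getsubids (shadow-utils) is present, but the subuid/subgid lookups below are skipped because the target user is root. For a rootless user the role would query the ID ranges roughly like this (hypothetical user "myuser"; output format is "index: user start count"):

    getsubids myuser        # subuid ranges, e.g. "0: myuser 524288 65536"
    getsubids -g myuser     # subgid ranges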
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.526) 0:02:14.838 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.057) 0:02:14.896 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.049) 0:02:14.946 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.045) 0:02:14.992 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.050) 0:02:15.042 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.041) 0:02:15.083 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.040) 0:02:15.124 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.038) 0:02:15.162 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.036) 0:02:15.199 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:28:06 -0500 (0:00:00.062) 0:02:15.262 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
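Because __podman_systemd_scope is "system", quadlet sources live under /etc/containers/systemd. For a rootless user the role would instead target the per-user quadlet search path, along these lines (user-scope path as documented for quadlet; shown here only as an illustration):

    # system scope (root):  /etc/containers/systemd/quadlet-demo.yml
    # user scope:           ~/.config/containers/systemd/quadlet-demo.yml  (managed via systemctl --user)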
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:28:07 -0500 (0:00:00.118) 0:02:15.380 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:28:07 -0500 (0:00:00.056) 0:02:15.437 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:28:07 -0500 (0:00:00.157) 0:02:15.594 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:28:07 -0500 (0:00:00.156) 0:02:15.751 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Tuesday 03 December 2024 18:28:07 -0500 (0:00:00.267) 0:02:16.018 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Tuesday 03 December 2024 18:28:07 -0500 (0:00:00.130) 0:02:16.149 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Tuesday 03 December 2024 18:28:07 -0500 (0:00:00.138) 0:02:16.287 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268419.1165571,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"ctime": 1733268414.4925258,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 290618,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1733268414.0195224,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.yml",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 1605,
"uid": 0,
"version": "257465972",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
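The checksum reported by the stat module is SHA-1 (its default checksum_algorithm), so the on-disk file can be cross-checked by hand:

    sha1sum /etc/containers/systemd/quadlet-demo.yml
    # 998dccde0483b1654327a46ddd89cbaa47650370  /etc/containers/systemd/quadlet-demo.yml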
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Tuesday 03 December 2024 18:28:08 -0500 (0:00:00.616) 0:02:16.903 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Tuesday 03 December 2024 18:28:08 -0500 (0:00:00.197) 0:02:17.101 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Tuesday 03 December 2024 18:28:09 -0500 (0:00:00.478) 0:02:17.579 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Tuesday 03 December 2024 18:28:09 -0500 (0:00:00.057) 0:02:17.637 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Tuesday 03 December 2024 18:28:09 -0500 (0:00:00.057) 0:02:17.695 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Tuesday 03 December 2024 18:28:09 -0500 (0:00:00.045) 0:02:17.740 ******
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.yml",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:09 -0500 (0:00:00.447) 0:02:18.187 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
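Together, the "Remove quadlet file" and "Refresh systemd" tasks are the declarative form of deleting the unit source and letting podman's quadlet generator drop the generated unit at the next reload; a manual equivalent would be roughly:

    rm /etc/containers/systemd/quadlet-demo.yml
    systemctl daemon-reload   # the quadlet generator re-runs; units generated from the file disappear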
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Tuesday 03 December 2024 18:28:10 -0500 (0:00:00.978) 0:02:19.165 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Tuesday 03 December 2024 18:28:10 -0500 (0:00:00.096) 0:02:19.262 ******
changed: [managed-node1] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
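The loop items are hidden by no_log, so the exact volume names are not recoverable from this log; each changed item corresponds to a removal of the form (placeholder name, not taken from the log):

    podman volume rm <volume-name>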
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:12 -0500 (0:00:01.615) 0:02:20.878 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Tuesday 03 December 2024 18:28:12 -0500 (0:00:00.042) 0:02:20.920 ******
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.033109",
"end": "2024-12-03 18:28:13.008804",
"rc": 0,
"start": "2024-12-03 18:28:12.975695"
}
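podman image prune --all -f removes every image not referenced by at least one container (not just dangling layers), and -f skips the confirmation prompt, which is why only images backing still-running containers survive into the listing below:

    podman image prune --all -f   # same cleanup, run by hand
    podman images -n              # verify what remains (cf. the next task)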
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Tuesday 03 December 2024 18:28:13 -0500 (0:00:00.546) 0:02:21.467 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:28:13 -0500 (0:00:00.198) 0:02:21.669 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:28:13 -0500 (0:00:00.114) 0:02:21.784 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:28:13 -0500 (0:00:00.087) 0:02:21.871 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Tuesday 03 December 2024 18:28:13 -0500 (0:00:00.123) 0:02:21.994 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.037235",
"end": "2024-12-03 18:28:14.131680",
"rc": 0,
"start": "2024-12-03 18:28:14.094445"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 16 months ago 24.6 MB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 2 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Tuesday 03 December 2024 18:28:14 -0500 (0:00:00.597) 0:02:22.592 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.035254",
"end": "2024-12-03 18:28:14.686400",
"rc": 0,
"start": "2024-12-03 18:28:14.651146"
}
STDOUT:
local 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5
local systemd-quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Tuesday 03 December 2024 18:28:14 -0500 (0:00:00.559) 0:02:23.151 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.040519",
"end": "2024-12-03 18:28:15.230121",
"rc": 0,
"start": "2024-12-03 18:28:15.189602"
}
STDOUT:
88a75afea3b9 quay.io/libpod/registry:2.8.2 /etc/docker/regis... 6 minutes ago Up 6 minutes 127.0.0.1:5000->5000/tcp podman_registry
b40d93c63736 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Tuesday 03 December 2024 18:28:15 -0500 (0:00:00.600) 0:02:23.753 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.032731",
"end": "2024-12-03 18:28:15.832545",
"rc": 0,
"start": "2024-12-03 18:28:15.799814"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Tuesday 03 December 2024 18:28:15 -0500 (0:00:00.554) 0:02:24.308 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Tuesday 03 December 2024 18:28:16 -0500 (0:00:00.545) 0:02:24.854 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Tuesday 03 December 2024 18:28:17 -0500 (0:00:00.566) 0:02:25.420 ******
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"b40d93c63736d6095b773c67d3be97d4fe5a01b281121a6ad6dccf433a33b81a-5d0d6e2109b39dcb.service": {
"name": "b40d93c63736d6095b773c67d3be97d4fe5a01b281121a6ad6dccf433a33b81a-5d0d6e2109b39dcb.service",
"source": "systemd",
"state": "stopped",
"status": "failed"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"bluetooth.service": {
"name": "bluetooth.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.bluez.service": {
"name": "dbus-org.bluez.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.home1.service": {
"name": "dbus-org.freedesktop.home1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.oom1.service": {
"name": "dbus-org.freedesktop.oom1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.resolve1.service": {
"name": "dbus-org.freedesktop.resolve1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf5-makecache.service": {
"name": "dnf5-makecache.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction-cleanup.service": {
"name": "dnf5-offline-transaction-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction.service": {
"name": "dnf5-offline-transaction.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fwupd-offline-update.service": {
"name": "fwupd-offline-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd-refresh.service": {
"name": "fwupd-refresh.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd.service": {
"name": "fwupd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"mdadm-grow-continue@.service": {
"name": "mdadm-grow-continue@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdadm-last-resort@.service": {
"name": "mdadm-last-resort@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdcheck_continue.service": {
"name": "mdcheck_continue.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdcheck_start.service": {
"name": "mdcheck_start.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmon@.service": {
"name": "mdmon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdmonitor-oneshot.service": {
"name": "mdmonitor-oneshot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmonitor.service": {
"name": "mdmonitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"raid-check.service": {
"name": "raid-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-bsod.service": {
"name": "systemd-bsod.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-homed-activate.service": {
"name": "systemd-homed-activate.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-homed-firstboot.service": {
"name": "systemd-homed-firstboot.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-homed.service": {
"name": "systemd-homed.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-mountfsd.service": {
"name": "systemd-mountfsd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-persistent-storage.service": {
"name": "systemd-networkd-persistent-storage.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-networkd-wait-online@.service": {
"name": "systemd-networkd-wait-online@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"systemd-networkd.service": {
"name": "systemd-networkd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-nsresourced.service": {
"name": "systemd-nsresourced.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-storagetm.service": {
"name": "systemd-storagetm.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-time-wait-sync.service": {
"name": "systemd-time-wait-sync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-userdbd.service": {
"name": "systemd-userdbd.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-zram-setup@.service": {
"name": "systemd-zram-setup@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-zram-setup@zram0.service": {
"name": "systemd-zram-setup@zram0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"udisks2.service": {
"name": "udisks2.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
}
}
},
"changed": false
}
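The services mapping above is the registered output of Ansible's ansible.builtin.service_facts module, which returns one entry per systemd unit with name, source, state, and status keys. A minimal sketch of a task that gathers the same data; the task name is taken from the matching "For testing and debugging - services" task that appears again later in this run:

- name: For testing and debugging - services
  ansible.builtin.service_facts: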
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:20 -0500 (0:00:03.386) 0:02:28.806 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:28:20 -0500 (0:00:00.072) 0:02:28.879 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
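The escaped __podman_quadlet_str above is a Kubernetes ConfigMap carrying an Envoy proxy configuration. Unescaped, its opening lines look like the following sketch (reconstructed directly from the string; the rest of envoy.yaml continues as in the string above):

---
apiVersion: v1
kind: ConfigMap
metadata:
  name: envoy-proxy-config
data:
  envoy.yaml: |
    admin:
      address:
        socket_address:
          address: 0.0.0.0
          port_value: 9901
    # ... static_resources continue as in the string above:
    # a TLS listener on 0.0.0.0:8080 routing to a static
    # "backend" cluster at 127.0.0.1:80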
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:28:20 -0500 (0:00:00.095) 0:02:28.975 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:28:20 -0500 (0:00:00.086) 0:02:29.061 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:20 -0500 (0:00:00.069) 0:02:29.130 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:28:20 -0500 (0:00:00.122) 0:02:29.253 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:28:21 -0500 (0:00:00.163) 0:02:29.417 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:28:21 -0500 (0:00:00.077) 0:02:29.494 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:28:21 -0500 (0:00:00.063) 0:02:29.558 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:28:21 -0500 (0:00:00.059) 0:02:29.617 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
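The stat result above only verifies that /usr/bin/getsubids exists; the subuid/subgid lookups that follow are skipped because __podman_user is root. A minimal sketch of that pattern, with the task bodies assumed for illustration (only the path, register variable, and when-condition are taken from this log):

- name: See if getsubids exists
  ansible.builtin.stat:
    path: /usr/bin/getsubids
  register: __podman_stat_getsubids

- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids {{ __podman_user }}
  when: __podman_user not in ["root", "0"]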
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:28:21 -0500 (0:00:00.473) 0:02:30.091 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:28:21 -0500 (0:00:00.152) 0:02:30.244 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:28:21 -0500 (0:00:00.088) 0:02:30.332 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.063) 0:02:30.396 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.072) 0:02:30.468 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.079) 0:02:30.548 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.061) 0:02:30.609 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.048) 0:02:30.658 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.040) 0:02:30.699 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.080) 0:02:30.779 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.045) 0:02:30.825 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.037) 0:02:30.862 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.090) 0:02:30.952 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.050) 0:02:31.002 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.086) 0:02:31.089 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.045) 0:02:31.135 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Tuesday 03 December 2024 18:28:22 -0500 (0:00:00.064) 0:02:31.199 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268446.6527443,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"ctime": 1733268409.399491,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 290627,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1733268409.0264885,
"nlink": 1,
"path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 2102,
"uid": 0,
"version": "862932835",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Tuesday 03 December 2024 18:28:23 -0500 (0:00:00.592) 0:02:31.792 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Tuesday 03 December 2024 18:28:23 -0500 (0:00:00.215) 0:02:32.008 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Tuesday 03 December 2024 18:28:24 -0500 (0:00:00.500) 0:02:32.508 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Tuesday 03 December 2024 18:28:24 -0500 (0:00:00.070) 0:02:32.578 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Tuesday 03 December 2024 18:28:24 -0500 (0:00:00.083) 0:02:32.662 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Tuesday 03 December 2024 18:28:24 -0500 (0:00:00.077) 0:02:32.739 ******
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"state": "absent"
}
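The removal above corresponds to an ansible.builtin.file task with state: absent, and the "Refresh systemd" result that follows is consistent with a daemon reload. A minimal sketch of the pair (module choices inferred from the result keys, not taken from the role source):

- name: Remove quadlet file
  ansible.builtin.file:
    path: "{{ __podman_quadlet_file }}"
    state: absent

- name: Refresh systemd
  ansible.builtin.systemd:
    daemon_reload: true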
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:24 -0500 (0:00:00.550) 0:02:33.290 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Tuesday 03 December 2024 18:28:25 -0500 (0:00:01.026) 0:02:34.317 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.069) 0:02:34.387 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.092) 0:02:34.480 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.065) 0:02:34.545 ******
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.038182",
"end": "2024-12-03 18:28:26.616704",
"rc": 0,
"start": "2024-12-03 18:28:26.578522"
}
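The prune step shells out to podman directly, as the cmd list in the result shows. A sketch of an equivalent task; the podman_prune_images guard variable is an assumption for illustration, not taken from this log:

- name: Prune images no longer in use
  ansible.builtin.command: podman image prune --all -f
  when: podman_prune_images | bool  # assumed guard, not shown in this log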
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.505) 0:02:35.051 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.070) 0:02:35.122 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.039) 0:02:35.161 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.042) 0:02:35.203 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Tuesday 03 December 2024 18:28:26 -0500 (0:00:00.058) 0:02:35.262 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.038203",
"end": "2024-12-03 18:28:27.333316",
"rc": 0,
"start": "2024-12-03 18:28:27.295113"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 16 months ago 24.6 MB
quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 2 years ago 308 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Tuesday 03 December 2024 18:28:27 -0500 (0:00:00.509) 0:02:35.772 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.032924",
"end": "2024-12-03 18:28:27.854308",
"rc": 0,
"start": "2024-12-03 18:28:27.821384"
}
STDOUT:
local 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5
local systemd-quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Tuesday 03 December 2024 18:28:27 -0500 (0:00:00.548) 0:02:36.321 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.043180",
"end": "2024-12-03 18:28:28.410206",
"rc": 0,
"start": "2024-12-03 18:28:28.367026"
}
STDOUT:
88a75afea3b9 quay.io/libpod/registry:2.8.2 /etc/docker/regis... 6 minutes ago Up 6 minutes 127.0.0.1:5000->5000/tcp podman_registry
b40d93c63736 quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Tuesday 03 December 2024 18:28:28 -0500 (0:00:00.589) 0:02:36.910 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.034905",
"end": "2024-12-03 18:28:29.024361",
"rc": 0,
"start": "2024-12-03 18:28:28.989456"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
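The four "For testing and debugging" listings above all run read-only podman commands. A compact sketch that collects the same output in one looped task (the loop form is an illustration, not how the role's task file is written):

- name: For testing and debugging - list podman resources
  ansible.builtin.command: "podman {{ item }}"
  changed_when: false  # listing commands never modify state
  loop:
    - images -n
    - volume ls -n
    - ps --noheading
    - network ls -n -q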
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Tuesday 03 December 2024 18:28:29 -0500 (0:00:00.572) 0:02:37.483 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Tuesday 03 December 2024 18:28:29 -0500 (0:00:00.554) 0:02:38.038 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Tuesday 03 December 2024 18:28:30 -0500 (0:00:00.508) 0:02:38.546 ******
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"b40d93c63736d6095b773c67d3be97d4fe5a01b281121a6ad6dccf433a33b81a-5d0d6e2109b39dcb.service": {
"name": "b40d93c63736d6095b773c67d3be97d4fe5a01b281121a6ad6dccf433a33b81a-5d0d6e2109b39dcb.service",
"source": "systemd",
"state": "stopped",
"status": "failed"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"bluetooth.service": {
"name": "bluetooth.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.bluez.service": {
"name": "dbus-org.bluez.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.home1.service": {
"name": "dbus-org.freedesktop.home1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.oom1.service": {
"name": "dbus-org.freedesktop.oom1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.resolve1.service": {
"name": "dbus-org.freedesktop.resolve1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf5-makecache.service": {
"name": "dnf5-makecache.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction-cleanup.service": {
"name": "dnf5-offline-transaction-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction.service": {
"name": "dnf5-offline-transaction.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fwupd-offline-update.service": {
"name": "fwupd-offline-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd-refresh.service": {
"name": "fwupd-refresh.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd.service": {
"name": "fwupd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"mdadm-grow-continue@.service": {
"name": "mdadm-grow-continue@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdadm-last-resort@.service": {
"name": "mdadm-last-resort@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdcheck_continue.service": {
"name": "mdcheck_continue.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdcheck_start.service": {
"name": "mdcheck_start.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmon@.service": {
"name": "mdmon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdmonitor-oneshot.service": {
"name": "mdmonitor-oneshot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmonitor.service": {
"name": "mdmonitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-mysql.service": {
"name": "quadlet-demo-mysql.service",
"source": "systemd",
"state": "running",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"raid-check.service": {
"name": "raid-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-bsod.service": {
"name": "systemd-bsod.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-homed-activate.service": {
"name": "systemd-homed-activate.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-homed-firstboot.service": {
"name": "systemd-homed-firstboot.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-homed.service": {
"name": "systemd-homed.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-mountfsd.service": {
"name": "systemd-mountfsd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-persistent-storage.service": {
"name": "systemd-networkd-persistent-storage.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-networkd-wait-online@.service": {
"name": "systemd-networkd-wait-online@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"systemd-networkd.service": {
"name": "systemd-networkd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-nsresourced.service": {
"name": "systemd-nsresourced.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-storagetm.service": {
"name": "systemd-storagetm.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-time-wait-sync.service": {
"name": "systemd-time-wait-sync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-userdbd.service": {
"name": "systemd-userdbd.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-zram-setup@.service": {
"name": "systemd-zram-setup@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-zram-setup@zram0.service": {
"name": "systemd-zram-setup@zram0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"udisks2.service": {
"name": "udisks2.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
}
}
},
"changed": false
}
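[Note: the large "services" mapping above is the output shape of ansible.builtin.service_facts: each unit is keyed by name and reports its source, runtime state, and unit-file status. A minimal sketch of gathering and querying the same data follows; the debug task is illustrative and not part of the role:

    - name: Gather unit state, as done before quadlet cleanup
      ansible.builtin.service_facts:

    - name: Inspect one quadlet-generated unit from the gathered facts
      ansible.builtin.debug:
        var: ansible_facts.services['quadlet-demo-mysql.service']
]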
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:33 -0500 (0:00:03.211) 0:02:41.758 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:28:33 -0500 (0:00:00.045) 0:02:41.803 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
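[Note: unescaped, the __podman_quadlet_str fact above corresponds to this Quadlet .container unit, rendered from quadlet-demo-mysql.container.j2:

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill
]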
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:28:33 -0500 (0:00:00.287) 0:02:42.091 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
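[Note: __podman_state is "absent" here, so this pass through handle_quadlet_spec.yml is the cleanup phase: the quadlet file and its generated service will be removed rather than created. A hedged sketch of how a caller requests that through the role's podman_quadlet_specs interface (field names assumed from this run's facts; treat as illustrative):

    - name: Remove the MySQL quadlet via the role (illustrative invocation)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.podman
      vars:
        podman_quadlet_specs:
          - template_src: quadlet-demo-mysql.container.j2
            state: absent
]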
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:28:33 -0500 (0:00:00.094) 0:02:42.186 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:33 -0500 (0:00:00.068) 0:02:42.254 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:28:33 -0500 (0:00:00.094) 0:02:42.349 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.129) 0:02:42.478 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.076) 0:02:42.555 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.078) 0:02:42.634 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.074) 0:02:42.709 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.445) 0:02:43.154 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.038) 0:02:43.193 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.040) 0:02:43.234 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.042) 0:02:43.276 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.037) 0:02:43.313 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:28:34 -0500 (0:00:00.036) 0:02:43.350 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.035) 0:02:43.386 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.042) 0:02:43.428 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
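[Note: all of the subuid/subgid checks above were skipped because __podman_user is root, which needs no user-namespace ranges. For a rootless user the role validates the allocated ranges with getsubids, roughly as in this sketch; the user name "poduser" is hypothetical:

    - name: Check subuid ranges for a rootless user
      ansible.builtin.command: getsubids poduser
      changed_when: false

    - name: Check subgid ranges for the same user
      ansible.builtin.command: getsubids -g poduser
      changed_when: false
]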
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.037) 0:02:43.466 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.083) 0:02:43.549 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
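[Note: /etc/containers/systemd is the system-wide (rootful) Quadlet search path; had __podman_rootless been true, the role would target the per-user location instead (see podman-systemd.unit(5)):

    /etc/containers/systemd           # system units, used in this run
    ~/.config/containers/systemd      # per-user units for rootless containers
]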
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.058) 0:02:43.608 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.071) 0:02:43.679 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.138) 0:02:43.817 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
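[Note: the result above is censored because the part-6 facts fold in the quadlet spec, which references the mysql-root-password-container secret, so the role sets no_log on the task. The general pattern, with purely illustrative variable names:

    - name: Set a fact that may embed secret material
      ansible.builtin.set_fact:
        __demo_spec: "{{ my_secret_var | default('') }}"
      no_log: true
]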
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.093) 0:02:43.911 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.172) 0:02:44.083 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Tuesday 03 December 2024 18:28:35 -0500 (0:00:00.050) 0:02:44.134 ******
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-mysql.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Tue 2024-12-03 18:26:45 EST",
"ActiveEnterTimestampMonotonic": "740183780",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "quadlet-demo-network.service system.slice systemd-journald.socket quadlet-demo-mysql-volume.service network-online.target basic.target -.mount tmp.mount sysinit.target",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Tue 2024-12-03 18:26:45 EST",
"AssertTimestampMonotonic": "739831965",
"Before": "multi-user.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "3457732000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Tue 2024-12-03 18:26:45 EST",
"ConditionTimestampMonotonic": "739831961",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo-mysql.service",
"ControlGroupId": "137521",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuset io memory pids",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveCPUs": "0-1",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveMemoryNodes": "0",
"EffectiveTasksMax": "4421",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainHandoffTimestampMonotonic": "0",
"ExecMainPID": "70679",
"ExecMainStartTimestamp": "Tue 2024-12-03 18:26:45 EST",
"ExecMainStartTimestampMonotonic": "740183694",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Tue 2024-12-03 18:26:45 EST",
"InactiveExitTimestampMonotonic": "739841376",
"InvocationID": "c2f4942086a54868833b1939f731b905",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "70679",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "2954305536",
"MemoryCurrent": "602726400",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "649191424",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "0",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "0",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMPolicy": "continue",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice -.mount quadlet-demo-mysql-volume.service tmp.mount quadlet-demo-network.service sysinit.target",
"RequiresMountsFor": "/tmp/quadlet_demo /run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Tue 2024-12-03 18:26:45 EST",
"StateChangeTimestampMonotonic": "740183780",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "23",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "45s",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
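[Note: two fields in the dump above explain the cleanup mechanics: UnitFileState is "generated" and FragmentPath lives under /run/systemd/generator, i.e. the unit is produced by the Quadlet generator rather than installed, so "disabling" it has little effect on its own and real removal happens by deleting the source .container file. The stop/disable step itself reduces to this minimal sketch; the role additionally tolerates already-absent units via a failed_when expression:

    - name: Stop and disable the generated unit
      ansible.builtin.systemd_service:
        name: quadlet-demo-mysql.service
        state: stopped
        enabled: false
      failed_when: false
]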
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Tuesday 03 December 2024 18:28:38 -0500 (0:00:02.763) 0:02:46.897 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268403.8764536,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"ctime": 1733268403.8824537,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 283313,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1733268403.5674515,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo-mysql.container",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 363,
"uid": 0,
"version": "3027459010",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Tuesday 03 December 2024 18:28:39 -0500 (0:00:00.554) 0:02:47.452 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Tuesday 03 December 2024 18:28:39 -0500 (0:00:00.127) 0:02:47.579 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Tuesday 03 December 2024 18:28:39 -0500 (0:00:00.507) 0:02:48.087 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Tuesday 03 December 2024 18:28:39 -0500 (0:00:00.178) 0:02:48.266 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Tuesday 03 December 2024 18:28:40 -0500 (0:00:00.141) 0:02:48.407 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
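
The two parse tasks above are censored by no_log, but their result shape is visible in the facts: __podman_quadlet_raw holds the slurped file and is nulled once parsing is done. A minimal sketch of that slurp-and-decode pattern (illustrative only; the role's real parser in parse_quadlet_file.yml also handles INI sections and kube YAML):

- name: Slurp quadlet file
  ansible.builtin.slurp:
    path: /etc/containers/systemd/quadlet-demo-mysql.container
  register: __podman_quadlet_raw

- name: Parse quadlet file into lines
  ansible.builtin.set_fact:
    __podman_quadlet_parsed: "{{ (__podman_quadlet_raw.content | b64decode).splitlines() }}"
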
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Tuesday 03 December 2024 18:28:40 -0500 (0:00:00.110) 0:02:48.518 ******
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo-mysql.container",
"state": "absent"
}
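
The result above, a path plus state: absent, is the output shape of ansible.builtin.file; an equivalent standalone task would be roughly:

- name: Remove quadlet file
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo-mysql.container
    state: absent
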
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:40 -0500 (0:00:00.522) 0:02:49.041 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
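
A null name and empty status from the systemd module mean the task performed only a daemon reload, so systemd drops the unit that was generated from the now-deleted quadlet file. A sketch of such a task:

- name: Refresh systemd
  ansible.builtin.systemd:
    daemon_reload: true
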
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Tuesday 03 December 2024 18:28:41 -0500 (0:00:00.931) 0:02:49.972 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Tuesday 03 December 2024 18:28:42 -0500 (0:00:00.585) 0:02:50.558 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:42 -0500 (0:00:00.107) 0:02:50.665 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Tuesday 03 December 2024 18:28:42 -0500 (0:00:00.087) 0:02:50.753 ******
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.260300",
"end": "2024-12-03 18:28:43.053945",
"rc": 0,
"start": "2024-12-03 18:28:42.793645"
}
STDOUT:
dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
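
podman image prune --all -f removes every image not referenced by any container (not merely dangling layers) and -f skips the confirmation prompt; stdout lists the IDs of the deleted images, here the image freed by the container removal above. As a task this is a plain command invocation:

- name: Prune images no longer in use
  ansible.builtin.command:
    cmd: podman image prune --all -f
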
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Tuesday 03 December 2024 18:28:43 -0500 (0:00:00.737) 0:02:51.491 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:28:43 -0500 (0:00:00.124) 0:02:51.615 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:28:43 -0500 (0:00:00.079) 0:02:51.694 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:28:43 -0500 (0:00:00.075) 0:02:51.770 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
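
All three linger tasks are skipped because this cleanup runs as root (__podman_rootless is false). For a rootless user the role keeps that user's systemd instance alive across logouts via loginctl lingering; a sketch of that step, with a hypothetical user name and not the role's exact task:

- name: Enable lingering so rootless units survive logout
  ansible.builtin.command:
    cmd: loginctl enable-linger someuser          # hypothetical rootless user
    creates: /var/lib/systemd/linger/someuser     # flag file systemd writes per lingering user
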
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Tuesday 03 December 2024 18:28:43 -0500 (0:00:00.166) 0:02:51.936 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.035792",
"end": "2024-12-03 18:28:43.991205",
"rc": 0,
"start": "2024-12-03 18:28:43.955413"
}
STDOUT:
quay.io/libpod/registry  2.8.2       0030ba3d620c  16 months ago  24.6 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Tuesday 03 December 2024 18:28:44 -0500 (0:00:00.517) 0:02:52.454 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.033583",
"end": "2024-12-03 18:28:44.503111",
"rc": 0,
"start": "2024-12-03 18:28:44.469528"
}
STDOUT:
local 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5
local systemd-quadlet-demo-mysql
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Tuesday 03 December 2024 18:28:44 -0500 (0:00:00.484) 0:02:52.939 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.039112",
"end": "2024-12-03 18:28:44.987818",
"rc": 0,
"start": "2024-12-03 18:28:44.948706"
}
STDOUT:
88a75afea3b9  quay.io/libpod/registry:2.8.2  /etc/docker/regis...  7 minutes ago  Up 7 minutes  127.0.0.1:5000->5000/tcp  podman_registry
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Tuesday 03 December 2024 18:28:45 -0500 (0:00:00.484) 0:02:53.424 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.034531",
"end": "2024-12-03 18:28:45.463824",
"rc": 0,
"start": "2024-12-03 18:28:45.429293"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Tuesday 03 December 2024 18:28:45 -0500 (0:00:00.496) 0:02:53.920 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Tuesday 03 December 2024 18:28:46 -0500 (0:00:00.491) 0:02:54.412 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Tuesday 03 December 2024 18:28:46 -0500 (0:00:00.496) 0:02:54.909 ******
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"bluetooth.service": {
"name": "bluetooth.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.bluez.service": {
"name": "dbus-org.bluez.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.home1.service": {
"name": "dbus-org.freedesktop.home1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.oom1.service": {
"name": "dbus-org.freedesktop.oom1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.resolve1.service": {
"name": "dbus-org.freedesktop.resolve1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf5-makecache.service": {
"name": "dnf5-makecache.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction-cleanup.service": {
"name": "dnf5-offline-transaction-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction.service": {
"name": "dnf5-offline-transaction.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fwupd-offline-update.service": {
"name": "fwupd-offline-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd-refresh.service": {
"name": "fwupd-refresh.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd.service": {
"name": "fwupd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"mdadm-grow-continue@.service": {
"name": "mdadm-grow-continue@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdadm-last-resort@.service": {
"name": "mdadm-last-resort@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdcheck_continue.service": {
"name": "mdcheck_continue.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdcheck_start.service": {
"name": "mdcheck_start.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmon@.service": {
"name": "mdmon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdmonitor-oneshot.service": {
"name": "mdmonitor-oneshot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmonitor.service": {
"name": "mdmonitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"quadlet-demo-mysql-volume.service": {
"name": "quadlet-demo-mysql-volume.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"raid-check.service": {
"name": "raid-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-bsod.service": {
"name": "systemd-bsod.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-homed-activate.service": {
"name": "systemd-homed-activate.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-homed-firstboot.service": {
"name": "systemd-homed-firstboot.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-homed.service": {
"name": "systemd-homed.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-mountfsd.service": {
"name": "systemd-mountfsd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-persistent-storage.service": {
"name": "systemd-networkd-persistent-storage.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-networkd-wait-online@.service": {
"name": "systemd-networkd-wait-online@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"systemd-networkd.service": {
"name": "systemd-networkd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-nsresourced.service": {
"name": "systemd-nsresourced.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-storagetm.service": {
"name": "systemd-storagetm.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-time-wait-sync.service": {
"name": "systemd-time-wait-sync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-userdbd.service": {
"name": "systemd-userdbd.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-zram-setup@.service": {
"name": "systemd-zram-setup@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-zram-setup@zram0.service": {
"name": "systemd-zram-setup@zram0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"udisks2.service": {
"name": "udisks2.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
}
}
},
"changed": false
}
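
The services map above, one entry of name/source/state/status per unit, is exactly what the service_facts module returns; gathering it needs no arguments:

- name: For testing and debugging - services
  ansible.builtin.service_facts:

Individual units can then be inspected, e.g. ansible_facts.services['quadlet-demo-mysql-volume.service'].status, which at this point still reads "generated" because the .volume quadlet file has not been removed yet.
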
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:49 -0500 (0:00:03.265) 0:02:58.175 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:28:49 -0500 (0:00:00.122) 0:02:58.297 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.131) 0:02:58.429 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.082) 0:02:58.511 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.061) 0:02:58.573 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.069) 0:02:58.642 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.075) 0:02:58.718 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.049) 0:02:58.768 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.057) 0:02:58.826 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:28:50 -0500 (0:00:00.086) 0:02:58.912 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
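
Because /usr/bin/getsubids exists, the role would use it, rather than reading /etc/subuid and /etc/subgid directly (that fallback is visible in the skipped "Get subuid file"/"Get subgid file" tasks below), to verify a rootless user's ID ranges. The next two checks are skipped here since the user is root; a sketch with a hypothetical user:

- name: Check with getsubids for user subuids
  ansible.builtin.command:
    cmd: getsubids someuser        # hypothetical rootless user
- name: Check with getsubids for user subgids
  ansible.builtin.command:
    cmd: getsubids -g someuser     # -g queries sub-GID ranges
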
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.519) 0:02:59.432 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.048) 0:02:59.480 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.045) 0:02:59.525 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.036) 0:02:59.562 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.036) 0:02:59.598 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.040) 0:02:59.638 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.042) 0:02:59.681 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.061) 0:02:59.742 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.052) 0:02:59.795 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
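
The naming here follows the quadlet convention: a NAME.volume unit file generates NAME-volume.service, and the podman volume it manages is named systemd-NAME by default, hence the systemd-quadlet-demo-mysql volume in the listing further up. In role terms, this whole cleanup pass corresponds to a spec entry roughly like the following (values inferred from the facts above, not the test's literal input):

podman_quadlet_specs:
  - file_src: quadlet-demo-mysql.volume
    state: absent
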
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.080) 0:02:59.875 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
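Quadlet derives unit names from file names: the quadlet-demo-mysql.volume file yields quadlet-demo-mysql-volume.service, and system-scope quadlet files live under /etc/containers/systemd as set above. To see the unit the generator produced from it:
    # Print the generated service unit (its FragmentPath sits under
    # /run/systemd/generator, as the status dump below confirms).
    systemctl cat quadlet-demo-mysql-volume.service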
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.051) 0:02:59.926 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.041) 0:02:59.968 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.083) 0:03:00.051 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.050) 0:03:00.102 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.085) 0:03:00.187 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Tuesday 03 December 2024 18:28:51 -0500 (0:00:00.038) 0:03:00.226 ******
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-mysql-volume.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Tue 2024-12-03 18:26:31 EST",
"ActiveEnterTimestampMonotonic": "725647496",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "systemd-journald.socket basic.target network-online.target -.mount sysinit.target system.slice",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Tue 2024-12-03 18:26:31 EST",
"AssertTimestampMonotonic": "725586579",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "47836000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Tue 2024-12-03 18:26:31 EST",
"ConditionTimestampMonotonic": "725586576",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "136877",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "1",
"ExecMainExitTimestamp": "Tue 2024-12-03 18:26:31 EST",
"ExecMainExitTimestampMonotonic": "725647321",
"ExecMainHandoffTimestamp": "Tue 2024-12-03 18:26:31 EST",
"ExecMainHandoffTimestampMonotonic": "725598691",
"ExecMainPID": "69247",
"ExecMainStartTimestamp": "Tue 2024-12-03 18:26:31 EST",
"ExecMainStartTimestampMonotonic": "725587529",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Tue 2024-12-03 18:26:31 EST",
"InactiveExitTimestampMonotonic": "725588064",
"InvocationID": "842a9910ea5247c68980a59e9894bab5",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3429380096",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "13905920",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "sysinit.target -.mount system.slice",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Tue 2024-12-03 18:26:31 EST",
"StateChangeTimestampMonotonic": "725647496",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "exited",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
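Reading the dump: this was a generated, Type=oneshot unit with RemainAfterExit=yes whose only job was ExecStart=podman volume create --ignore systemd-quadlet-demo-mysql, produced from SourcePath /etc/containers/systemd/quadlet-demo-mysql.volume. What the task performed is roughly the sketch below; note UnitFileState=generated, so there is no native [Install] state to disable:
    # Roughly what the Ansible systemd task did (sketch, not the role's
    # code). Disabling is effectively moot for generated units.
    systemctl stop quadlet-demo-mysql-volume.service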
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Tuesday 03 December 2024 18:28:52 -0500 (0:00:01.019) 0:03:01.245 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268389.4353552,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"ctime": 1733268389.4413552,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 283310,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1733268389.130353,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 9,
"uid": 0,
"version": "3161569277",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
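The stat is consistent with a minimal volume quadlet: at 9 bytes the file holds exactly a section header plus a newline, which is all quadlet needs to create a default-configured volume named systemd-<unit>. An assumed reconstruction of the content behind the checksum above:
    # "[Volume]\n" is exactly 9 bytes; an empty [Volume] section makes
    # quadlet create the default volume systemd-quadlet-demo-mysql.
    printf '[Volume]\n' > /etc/containers/systemd/quadlet-demo-mysql.volume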
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Tuesday 03 December 2024 18:28:53 -0500 (0:00:00.523) 0:03:01.768 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Tuesday 03 December 2024 18:28:53 -0500 (0:00:00.127) 0:03:01.896 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Tuesday 03 December 2024 18:28:54 -0500 (0:00:00.478) 0:03:02.374 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Tuesday 03 December 2024 18:28:54 -0500 (0:00:00.061) 0:03:02.436 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Tuesday 03 December 2024 18:28:54 -0500 (0:00:00.046) 0:03:02.483 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Tuesday 03 December 2024 18:28:54 -0500 (0:00:00.047) 0:03:02.531 ******
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"state": "absent"
}
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Tuesday 03 December 2024 18:28:54 -0500 (0:00:00.585) 0:03:03.116 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
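With name null and an empty status, this task is a bare daemon reload: it re-runs the systemd generators so the deleted .volume file no longer yields a unit. Manual equivalent:
    # Re-run generators; quadlet-demo-mysql-volume.service disappears
    # because its source file is gone.
    systemctl daemon-reload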
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Tuesday 03 December 2024 18:28:55 -0500 (0:00:00.919) 0:03:04.035 ******
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
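The censored change removes the managed resource itself; for a .volume quadlet that is the podman volume. A manual equivalent, with the volume name taken from the ExecStart line in the status dump above:
    # Remove the volume the generated unit had created.
    podman volume rm systemd-quadlet-demo-mysql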
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Tuesday 03 December 2024 18:28:56 -0500 (0:00:00.578) 0:03:04.614 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Tuesday 03 December 2024 18:28:56 -0500 (0:00:00.057) 0:03:04.672 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Tuesday 03 December 2024 18:28:56 -0500 (0:00:00.063) 0:03:04.736 ******
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.035276",
"end": "2024-12-03 18:28:56.792234",
"rc": 0,
"start": "2024-12-03 18:28:56.756958"
}
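In the prune above, --all removes every image not referenced by a container (not only dangling layers) and -f skips the confirmation prompt; the registry image survives below because the podman_registry container is still using it.
    # Same cleanup by hand; drop --all to prune only dangling images.
    podman image prune --all -f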
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Tuesday 03 December 2024 18:28:56 -0500 (0:00:00.493) 0:03:05.230 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:28:56 -0500 (0:00:00.084) 0:03:05.314 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:28:57 -0500 (0:00:00.057) 0:03:05.372 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:28:57 -0500 (0:00:00.042) 0:03:05.414 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
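Linger management only applies to rootless users, so all three tasks are skipped for root. For a rootless user the role would toggle lingering so the user's systemd instance, and with it the quadlet units, can run without an open session; a sketch with a placeholder user:
    # Keep someuser's user manager running across logouts / stop doing so.
    loginctl enable-linger someuser
    loginctl disable-linger someuser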
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Tuesday 03 December 2024 18:28:57 -0500 (0:00:00.044) 0:03:05.459 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.035919",
"end": "2024-12-03 18:28:57.504920",
"rc": 0,
"start": "2024-12-03 18:28:57.469001"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 16 months ago 24.6 MB
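The -n flag suppressed the heading row; the columns are REPOSITORY, TAG, IMAGE ID, CREATED, and SIZE. The one survivor is the local test registry's image, in use by the still-running container shown two tasks below.
    # Same listing with the heading row:
    podman images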
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Tuesday 03 December 2024 18:28:57 -0500 (0:00:00.505) 0:03:05.964 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.033133",
"end": "2024-12-03 18:28:58.034380",
"rc": 0,
"start": "2024-12-03 18:28:58.001247"
}
STDOUT:
local 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5
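One anonymous volume remains after the demo volume was removed; the 64-hex name marks it as auto-created, presumably by the registry container rather than by any quadlet. To confirm what is using it:
    # Inspect the leftover anonymous volume (name copied from above).
    podman volume inspect 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5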
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Tuesday 03 December 2024 18:28:58 -0500 (0:00:00.506) 0:03:06.471 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.040397",
"end": "2024-12-03 18:28:58.519421",
"rc": 0,
"start": "2024-12-03 18:28:58.479024"
}
STDOUT:
88a75afea3b9 quay.io/libpod/registry:2.8.2 /etc/docker/regis... 7 minutes ago Up 7 minutes 127.0.0.1:5000->5000/tcp podman_registry
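Only podman_registry is left: the loopback registry on 127.0.0.1:5000 apparently started by the test setup rather than by the role, so the cleanup rightly leaves it alone.
    # Port mapping of the surviving test registry:
    podman port podman_registry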
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Tuesday 03 December 2024 18:28:58 -0500 (0:00:00.481) 0:03:06.952 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.034918",
"end": "2024-12-03 18:28:59.009369",
"rc": 0,
"start": "2024-12-03 18:28:58.974451"
}
STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo
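Besides podman's two stock networks, the systemd-quadlet-demo network created by the demo's .network quadlet is still present; quadlet-demo-network.service likewise still appears as generated in the service facts below, so its cleanup presumably happens on a later pass of this loop.
    # Inspect the quadlet-created network while it still exists.
    podman network inspect systemd-quadlet-demo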
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Tuesday 03 December 2024 18:28:59 -0500 (0:00:00.498) 0:03:07.450 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Tuesday 03 December 2024 18:28:59 -0500 (0:00:00.517) 0:03:07.968 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Tuesday 03 December 2024 18:29:00 -0500 (0:00:00.637) 0:03:08.605 ******
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"bluetooth.service": {
"name": "bluetooth.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.bluez.service": {
"name": "dbus-org.bluez.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.home1.service": {
"name": "dbus-org.freedesktop.home1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.oom1.service": {
"name": "dbus-org.freedesktop.oom1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.resolve1.service": {
"name": "dbus-org.freedesktop.resolve1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf5-makecache.service": {
"name": "dnf5-makecache.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction-cleanup.service": {
"name": "dnf5-offline-transaction-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction.service": {
"name": "dnf5-offline-transaction.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fwupd-offline-update.service": {
"name": "fwupd-offline-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd-refresh.service": {
"name": "fwupd-refresh.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd.service": {
"name": "fwupd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"mdadm-grow-continue@.service": {
"name": "mdadm-grow-continue@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdadm-last-resort@.service": {
"name": "mdadm-last-resort@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdcheck_continue.service": {
"name": "mdcheck_continue.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdcheck_start.service": {
"name": "mdcheck_start.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmon@.service": {
"name": "mdmon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdmonitor-oneshot.service": {
"name": "mdmonitor-oneshot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmonitor.service": {
"name": "mdmonitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"quadlet-demo-network.service": {
"name": "quadlet-demo-network.service",
"source": "systemd",
"state": "stopped",
"status": "generated"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"raid-check.service": {
"name": "raid-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-bsod.service": {
"name": "systemd-bsod.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-homed-activate.service": {
"name": "systemd-homed-activate.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-homed-firstboot.service": {
"name": "systemd-homed-firstboot.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-homed.service": {
"name": "systemd-homed.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-mountfsd.service": {
"name": "systemd-mountfsd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-persistent-storage.service": {
"name": "systemd-networkd-persistent-storage.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-networkd-wait-online@.service": {
"name": "systemd-networkd-wait-online@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"systemd-networkd.service": {
"name": "systemd-networkd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-nsresourced.service": {
"name": "systemd-nsresourced.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-storagetm.service": {
"name": "systemd-storagetm.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-time-wait-sync.service": {
"name": "systemd-time-wait-sync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-userdbd.service": {
"name": "systemd-userdbd.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-zram-setup@.service": {
"name": "systemd-zram-setup@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-zram-setup@zram0.service": {
"name": "systemd-zram-setup@zram0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"udisks2.service": {
"name": "udisks2.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
}
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:29:03 -0500 (0:00:03.271) 0:03:11.876 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Tuesday 03 December 2024 18:29:03 -0500 (0:00:00.064) 0:03:11.941 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
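For reference, the __podman_quadlet_str value above unescapes to the following unit file content (the target path, /etc/containers/systemd/quadlet-demo.network, and the matching 74-byte size appear in the stat result later in this run):

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress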
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Tuesday 03 December 2024 18:29:03 -0500 (0:00:00.103) 0:03:12.044 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Tuesday 03 December 2024 18:29:03 -0500 (0:00:00.085) 0:03:12.130 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Tuesday 03 December 2024 18:29:03 -0500 (0:00:00.046) 0:03:12.176 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Tuesday 03 December 2024 18:29:03 -0500 (0:00:00.066) 0:03:12.243 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Tuesday 03 December 2024 18:29:03 -0500 (0:00:00.105) 0:03:12.348 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.057) 0:03:12.406 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.053) 0:03:12.460 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.082) 0:03:12.542 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268073.9241958,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "0c228ad086513530aab958732f1fb01238bc39b0",
"ctime": 1733268033.276886,
"dev": 51714,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 192187,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-pie-executable",
"mode": "0755",
"mtime": 1728518400.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 15728,
"uid": 0,
"version": "1583145383",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.486) 0:03:13.029 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.039) 0:03:13.068 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.041) 0:03:13.109 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.045) 0:03:13.155 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.065) 0:03:13.220 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72
Tuesday 03 December 2024 18:29:04 -0500 (0:00:00.098) 0:03:13.318 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.068) 0:03:13.387 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.074) 0:03:13.461 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
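All of the subuid/subgid tasks above are skipped because __podman_user is "root"; getsubids (whose existence the stat task just confirmed) is only consulted for rootless users. A minimal sketch of the kind of check the role runs in that case — the task names and the user "poduser" are assumptions, not from this run:

    - name: Check with getsubids for user subuids   # sketch for a hypothetical rootless user
      ansible.builtin.command:
        cmd: getsubids poduser   # prints the subuid ranges allocated to the user
      changed_when: false

    - name: Check with getsubids for user subgids   # -g queries subgid ranges instead
      ansible.builtin.command:
        cmd: getsubids -g poduser
      changed_when: false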
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.077) 0:03:13.538 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.112) 0:03:13.650 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.071) 0:03:13.721 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.069) 0:03:13.791 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.144) 0:03:13.935 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.078) 0:03:14.014 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Tuesday 03 December 2024 18:29:05 -0500 (0:00:00.154) 0:03:14.168 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Tuesday 03 December 2024 18:29:06 -0500 (0:00:00.187) 0:03:14.356 ******
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo-network.service",
"state": "stopped",
"status": {
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
"ActiveEnterTimestamp": "Tue 2024-12-03 18:26:26 EST",
"ActiveEnterTimestampMonotonic": "720591304",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "systemd-journald.socket network-online.target basic.target -.mount sysinit.target system.slice",
"AllowIsolate": "no",
"AssertResult": "yes",
"AssertTimestamp": "Tue 2024-12-03 18:26:26 EST",
"AssertTimestampMonotonic": "720537728",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "yes",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "47574000",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
"CleanResult": "success",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Tue 2024-12-03 18:26:26 EST",
"ConditionTimestampMonotonic": "720537724",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroupId": "136833",
"ControlPID": "0",
"CoredumpFilter": "0x33",
"CoredumpReceive": "no",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"DefaultStartupMemoryLow": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
"DynamicUser": "no",
"EffectiveMemoryHigh": "3896733696",
"EffectiveMemoryMax": "3896733696",
"EffectiveTasksMax": "4421",
"Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
"ExecMainCode": "1",
"ExecMainExitTimestamp": "Tue 2024-12-03 18:26:26 EST",
"ExecMainExitTimestampMonotonic": "720591138",
"ExecMainHandoffTimestamp": "Tue 2024-12-03 18:26:26 EST",
"ExecMainHandoffTimestampMonotonic": "720551581",
"ExecMainPID": "68279",
"ExecMainStartTimestamp": "Tue 2024-12-03 18:26:26 EST",
"ExecMainStartTimestampMonotonic": "720538723",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExitType": "main",
"ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FileDescriptorStorePreserve": "restart",
"FinalKillSignal": "9",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOReadBytes": "[not set]",
"IOReadOperations": "[not set]",
"IOSchedulingClass": "2",
"IOSchedulingPriority": "4",
"IOWeight": "[not set]",
"IOWriteBytes": "[not set]",
"IOWriteOperations": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "[no data]",
"IPEgressPackets": "[no data]",
"IPIngressBytes": "[no data]",
"IPIngressPackets": "[no data]",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Tue 2024-12-03 18:26:26 EST",
"InactiveExitTimestampMonotonic": "720539296",
"InvocationID": "9ed94ef59894418497182ef7749bda47",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "infinity",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "8388608",
"LimitMEMLOCKSoft": "8388608",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "524288",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14739",
"LimitNPROCSoft": "14739",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14739",
"LimitSIGPENDINGSoft": "14739",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"ManagedOOMMemoryPressure": "auto",
"ManagedOOMMemoryPressureLimit": "0",
"ManagedOOMPreference": "none",
"ManagedOOMSwap": "auto",
"MemoryAccounting": "yes",
"MemoryAvailable": "3415539712",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryKSM": "no",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemoryPeak": "13799424",
"MemoryPressureThresholdUSec": "200ms",
"MemoryPressureWatch": "auto",
"MemorySwapCurrent": "[not set]",
"MemorySwapMax": "infinity",
"MemorySwapPeak": "0",
"MemoryZSwapCurrent": "[not set]",
"MemoryZSwapMax": "infinity",
"MemoryZSwapWriteback": "yes",
"MountAPIVFS": "no",
"MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMPolicy": "stop",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"OnSuccessJobMode": "fail",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateIPC": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProcSubset": "all",
"ProtectClock": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectHostname": "no",
"ProtectKernelLogs": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectProc": "default",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"ReloadResult": "success",
"ReloadSignal": "1",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice -.mount",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartKillSignal": "15",
"RestartMaxDelayUSec": "infinity",
"RestartMode": "normal",
"RestartSteps": "0",
"RestartUSec": "100ms",
"RestartUSecNext": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RootEphemeral": "no",
"RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"RuntimeRandomizedExtraUSec": "0",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"SetLoginEnvironment": "no",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.network",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StartupMemoryHigh": "infinity",
"StartupMemoryLow": "0",
"StartupMemoryMax": "infinity",
"StartupMemorySwapMax": "infinity",
"StartupMemoryZSwapMax": "infinity",
"StateChangeTimestamp": "Tue 2024-12-03 18:26:26 EST",
"StateChangeTimestampMonotonic": "720591304",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "exited",
"SuccessAction": "none",
"SurviveFinalKillSignal": "no",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "2147483646",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "4421",
"TimeoutAbortUSec": "45s",
"TimeoutCleanUSec": "infinity",
"TimeoutStartFailureMode": "terminate",
"TimeoutStartUSec": "infinity",
"TimeoutStopFailureMode": "abort",
"TimeoutStopUSec": "45s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"Wants": "network-online.target",
"WatchdogSignal": "6",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
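The unit stopped here was produced by the podman quadlet generator: SourcePath points at the .network file, FragmentPath sits under /run/systemd/generator, and the oneshot service wraps the "podman network create" command shown in ExecStart. A minimal sketch of an equivalent standalone task, assuming ansible.builtin.systemd (the role's actual task also uses a failure condition, as the failed_when_result field above suggests):

    - name: Stop and disable the generated quadlet unit   # sketch, not the role's exact task
      ansible.builtin.systemd:
        name: quadlet-demo-network.service
        state: stopped
        enabled: false
      failed_when: false   # assumption: tolerate stop failures the way the role appears to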
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33
Tuesday 03 December 2024 18:29:06 -0500 (0:00:00.959) 0:03:15.315 ******
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1733268384.4073207,
"attr_flags": "e",
"attributes": [
"extents"
],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"ctime": 1733268384.4133208,
"dev": 51714,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 283308,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1733268383.993318,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.network",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 74,
"uid": 0,
"version": "26941861",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38
Tuesday 03 December 2024 18:29:07 -0500 (0:00:00.468) 0:03:15.783 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Tuesday 03 December 2024 18:29:07 -0500 (0:00:00.086) 0:03:15.870 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Tuesday 03 December 2024 18:29:07 -0500 (0:00:00.422) 0:03:16.292 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44
Tuesday 03 December 2024 18:29:07 -0500 (0:00:00.058) 0:03:16.350 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Reset raw variable] *******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52
Tuesday 03 December 2024 18:29:08 -0500 (0:00:00.043) 0:03:16.394 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_raw": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Remove quadlet file] ******************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42
Tuesday 03 December 2024 18:29:08 -0500 (0:00:00.039) 0:03:16.433 ******
changed: [managed-node1] => {
"changed": true,
"path": "/etc/containers/systemd/quadlet-demo.network",
"state": "absent"
}
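A minimal sketch of the equivalent removal task, assuming ansible.builtin.file (path and state are taken directly from the result above):

    - name: Remove quadlet file   # sketch of the equivalent task
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo.network
        state: absent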
TASK [fedora.linux_system_roles.podman : Refresh systemd] **********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48
Tuesday 03 December 2024 18:29:08 -0500 (0:00:00.454) 0:03:16.888 ******
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
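The empty name/status in this result is consistent with a plain daemon reload, which re-runs the generators and drops quadlet-demo-network.service now that its source file is gone. A sketch, assuming ansible.builtin.systemd:

    - name: Refresh systemd   # sketch: re-run generators after removing the quadlet file
      ansible.builtin.systemd:
        daemon_reload: true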
TASK [fedora.linux_system_roles.podman : Remove managed resource] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58
Tuesday 03 December 2024 18:29:09 -0500 (0:00:00.894) 0:03:17.783 ******
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
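For a network quadlet, removing the managed resource amounts to deleting the named network; the actual output is hidden by no_log. A hedged sketch of what this step plausibly does — the module choice is an assumption, since the result is censored:

    - name: Remove managed resource   # sketch; the real call is hidden by no_log
      containers.podman.podman_network:
        name: systemd-quadlet-demo
        state: absent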
TASK [fedora.linux_system_roles.podman : Remove volumes] ***********************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Tuesday 03 December 2024 18:29:09 -0500 (0:00:00.508) 0:03:18.291 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Tuesday 03 December 2024 18:29:09 -0500 (0:00:00.056) 0:03:18.348 ******
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_parsed": null
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Tuesday 03 December 2024 18:29:10 -0500 (0:00:00.061) 0:03:18.409 ******
changed: [managed-node1] => {
"changed": true,
"cmd": [
"podman",
"image",
"prune",
"--all",
"-f"
],
"delta": "0:00:00.036453",
"end": "2024-12-03 18:29:10.475597",
"rc": 0,
"start": "2024-12-03 18:29:10.439144"
}
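A sketch of a task reproducing the exact command recorded in this result (the running registry container keeps quay.io/libpod/registry in use, which is why it survives the prune, as the next task's output shows):

    - name: Prune images no longer in use   # sketch matching the cmd list above
      ansible.builtin.command:
        cmd: podman image prune --all -f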
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Tuesday 03 December 2024 18:29:10 -0500 (0:00:00.501) 0:03:18.911 ******
included: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Tuesday 03 December 2024 18:29:10 -0500 (0:00:00.074) 0:03:18.985 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Tuesday 03 December 2024 18:29:10 -0500 (0:00:00.037) 0:03:19.023 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Tuesday 03 December 2024 18:29:10 -0500 (0:00:00.037) 0:03:19.061 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Tuesday 03 December 2024 18:29:10 -0500 (0:00:00.041) 0:03:19.102 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"images",
"-n"
],
"delta": "0:00:00.035070",
"end": "2024-12-03 18:29:11.146016",
"rc": 0,
"start": "2024-12-03 18:29:11.110946"
}
STDOUT:
quay.io/libpod/registry 2.8.2 0030ba3d620c 16 months ago 24.6 MB
TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Tuesday 03 December 2024 18:29:11 -0500 (0:00:00.477) 0:03:19.579 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls",
"-n"
],
"delta": "0:00:00.034948",
"end": "2024-12-03 18:29:11.623847",
"rc": 0,
"start": "2024-12-03 18:29:11.588899"
}
STDOUT:
local 774d37aaaacaaa4246fdd6111cd5ecaa847e2ab3b455cc9073063cc739ac90a5
TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Tuesday 03 December 2024 18:29:11 -0500 (0:00:00.476) 0:03:20.056 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"--noheading"
],
"delta": "0:00:00.040688",
"end": "2024-12-03 18:29:12.103867",
"rc": 0,
"start": "2024-12-03 18:29:12.063179"
}
STDOUT:
88a75afea3b9 quay.io/libpod/registry:2.8.2 /etc/docker/regis... 7 minutes ago Up 7 minutes 127.0.0.1:5000->5000/tcp podman_registry
TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Tuesday 03 December 2024 18:29:12 -0500 (0:00:00.480) 0:03:20.537 ******
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"network",
"ls",
"-n",
"-q"
],
"delta": "0:00:00.033821",
"end": "2024-12-03 18:29:12.581627",
"rc": 0,
"start": "2024-12-03 18:29:12.547806"
}
STDOUT:
podman
podman-default-kube-network
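Only the two default networks remain, so the systemd-quadlet-demo network created by the generated unit is confirmed gone after the "Remove managed resource" step. A hedged sketch of a standalone verification task (task and variable names are assumptions):

    - name: Verify the quadlet network is gone   # hypothetical follow-up check
      ansible.builtin.command:
        cmd: podman network exists systemd-quadlet-demo
      register: __net_check   # hypothetical variable
      failed_when: __net_check.rc == 0   # rc 0 would mean the network still exists
      changed_when: false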
TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Tuesday 03 December 2024 18:29:12 -0500 (0:00:00.483) 0:03:21.020 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Tuesday 03 December 2024 18:29:13 -0500 (0:00:00.499) 0:03:21.519 ******
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Tuesday 03 December 2024 18:29:13 -0500 (0:00:00.510) 0:03:22.030 ******
ok: [managed-node1] => {
"ansible_facts": {
"services": {
"NetworkManager-dispatcher.service": {
"name": "NetworkManager-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"NetworkManager-wait-online.service": {
"name": "NetworkManager-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"NetworkManager.service": {
"name": "NetworkManager.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"audit-rules.service": {
"name": "audit-rules.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"auditd.service": {
"name": "auditd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"auth-rpcgss-module.service": {
"name": "auth-rpcgss-module.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"autovt@.service": {
"name": "autovt@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"blk-availability.service": {
"name": "blk-availability.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"bluetooth.service": {
"name": "bluetooth.service",
"source": "systemd",
"state": "inactive",
"status": "enabled"
},
"capsule@.service": {
"name": "capsule@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"certmonger.service": {
"name": "certmonger.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"chrony-wait.service": {
"name": "chrony-wait.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd-restricted.service": {
"name": "chronyd-restricted.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"chronyd.service": {
"name": "chronyd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"cloud-config.service": {
"name": "cloud-config.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-final.service": {
"name": "cloud-final.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init-hotplugd.service": {
"name": "cloud-init-hotplugd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"cloud-init-local.service": {
"name": "cloud-init-local.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"cloud-init.service": {
"name": "cloud-init.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"console-getty.service": {
"name": "console-getty.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"container-getty@.service": {
"name": "container-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"dbus-broker.service": {
"name": "dbus-broker.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"dbus-org.bluez.service": {
"name": "dbus-org.bluez.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.fedoraproject.FirewallD1.service": {
"name": "dbus-org.fedoraproject.FirewallD1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.home1.service": {
"name": "dbus-org.freedesktop.home1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.hostname1.service": {
"name": "dbus-org.freedesktop.hostname1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.locale1.service": {
"name": "dbus-org.freedesktop.locale1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.login1.service": {
"name": "dbus-org.freedesktop.login1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.nm-dispatcher.service": {
"name": "dbus-org.freedesktop.nm-dispatcher.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.oom1.service": {
"name": "dbus-org.freedesktop.oom1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.portable1.service": {
"name": "dbus-org.freedesktop.portable1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus-org.freedesktop.resolve1.service": {
"name": "dbus-org.freedesktop.resolve1.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"dbus-org.freedesktop.timedate1.service": {
"name": "dbus-org.freedesktop.timedate1.service",
"source": "systemd",
"state": "inactive",
"status": "alias"
},
"dbus.service": {
"name": "dbus.service",
"source": "systemd",
"state": "active",
"status": "alias"
},
"debug-shell.service": {
"name": "debug-shell.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd.service": {
"name": "dhcpcd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dhcpcd@.service": {
"name": "dhcpcd@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"display-manager.service": {
"name": "display-manager.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"dm-event.service": {
"name": "dm-event.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dnf-system-upgrade-cleanup.service": {
"name": "dnf-system-upgrade-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf-system-upgrade.service": {
"name": "dnf-system-upgrade.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dnf5-makecache.service": {
"name": "dnf5-makecache.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction-cleanup.service": {
"name": "dnf5-offline-transaction-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"dnf5-offline-transaction.service": {
"name": "dnf5-offline-transaction.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"dracut-cmdline.service": {
"name": "dracut-cmdline.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-initqueue.service": {
"name": "dracut-initqueue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-mount.service": {
"name": "dracut-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-mount.service": {
"name": "dracut-pre-mount.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-pivot.service": {
"name": "dracut-pre-pivot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-trigger.service": {
"name": "dracut-pre-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-pre-udev.service": {
"name": "dracut-pre-udev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown-onfailure.service": {
"name": "dracut-shutdown-onfailure.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"dracut-shutdown.service": {
"name": "dracut-shutdown.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ebtables.service": {
"name": "ebtables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"emergency.service": {
"name": "emergency.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fips-crypto-policy-overlay.service": {
"name": "fips-crypto-policy-overlay.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"firewalld.service": {
"name": "firewalld.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"fsidd.service": {
"name": "fsidd.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"fstrim.service": {
"name": "fstrim.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"fwupd-offline-update.service": {
"name": "fwupd-offline-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd-refresh.service": {
"name": "fwupd-refresh.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"fwupd.service": {
"name": "fwupd.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"getty@.service": {
"name": "getty@.service",
"source": "systemd",
"state": "unknown",
"status": "enabled"
},
"getty@tty1.service": {
"name": "getty@tty1.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"grub-boot-indeterminate.service": {
"name": "grub-boot-indeterminate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"grub2-systemd-integration.service": {
"name": "grub2-systemd-integration.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"gssproxy.service": {
"name": "gssproxy.service",
"source": "systemd",
"state": "running",
"status": "disabled"
},
"hv_kvp_daemon.service": {
"name": "hv_kvp_daemon.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"initrd-cleanup.service": {
"name": "initrd-cleanup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-parse-etc.service": {
"name": "initrd-parse-etc.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-switch-root.service": {
"name": "initrd-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"initrd-udevadm-cleanup-db.service": {
"name": "initrd-udevadm-cleanup-db.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ip6tables.service": {
"name": "ip6tables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ipset.service": {
"name": "ipset.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"iptables.service": {
"name": "iptables.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"kmod-static-nodes.service": {
"name": "kmod-static-nodes.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"ldconfig.service": {
"name": "ldconfig.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm-devices-import.service": {
"name": "lvm-devices-import.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"lvm2-lvmpolld.service": {
"name": "lvm2-lvmpolld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"lvm2-monitor.service": {
"name": "lvm2-monitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"man-db-cache-update.service": {
"name": "man-db-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"man-db-restart-cache-update.service": {
"name": "man-db-restart-cache-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"mdadm-grow-continue@.service": {
"name": "mdadm-grow-continue@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdadm-last-resort@.service": {
"name": "mdadm-last-resort@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdcheck_continue.service": {
"name": "mdcheck_continue.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdcheck_start.service": {
"name": "mdcheck_start.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmon@.service": {
"name": "mdmon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"mdmonitor-oneshot.service": {
"name": "mdmonitor-oneshot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"mdmonitor.service": {
"name": "mdmonitor.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"modprobe@.service": {
"name": "modprobe@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"modprobe@configfs.service": {
"name": "modprobe@configfs.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@dm_mod.service": {
"name": "modprobe@dm_mod.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@drm.service": {
"name": "modprobe@drm.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@efi_pstore.service": {
"name": "modprobe@efi_pstore.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@fuse.service": {
"name": "modprobe@fuse.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"modprobe@loop.service": {
"name": "modprobe@loop.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"netavark-dhcp-proxy.service": {
"name": "netavark-dhcp-proxy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"netavark-firewalld-reload.service": {
"name": "netavark-firewalld-reload.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"network.service": {
"name": "network.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"nfs-blkmap.service": {
"name": "nfs-blkmap.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nfs-idmapd.service": {
"name": "nfs-idmapd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-mountd.service": {
"name": "nfs-mountd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfs-server.service": {
"name": "nfs-server.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"nfs-utils.service": {
"name": "nfs-utils.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nfsdcld.service": {
"name": "nfsdcld.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"nftables.service": {
"name": "nftables.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nis-domainname.service": {
"name": "nis-domainname.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"nm-priv-helper.service": {
"name": "nm-priv-helper.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"ntpd.service": {
"name": "ntpd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ntpdate.service": {
"name": "ntpdate.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"pam_namespace.service": {
"name": "pam_namespace.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"pcscd.service": {
"name": "pcscd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"plymouth-halt.service": {
"name": "plymouth-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-kexec.service": {
"name": "plymouth-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-poweroff.service": {
"name": "plymouth-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-quit-wait.service": {
"name": "plymouth-quit-wait.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-quit.service": {
"name": "plymouth-quit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-read-write.service": {
"name": "plymouth-read-write.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-reboot.service": {
"name": "plymouth-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-start.service": {
"name": "plymouth-start.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"plymouth-switch-root-initramfs.service": {
"name": "plymouth-switch-root-initramfs.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"plymouth-switch-root.service": {
"name": "plymouth-switch-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"podman-auto-update.service": {
"name": "podman-auto-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-clean-transient.service": {
"name": "podman-clean-transient.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman-kube@.service": {
"name": "podman-kube@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"podman-restart.service": {
"name": "podman-restart.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"podman.service": {
"name": "podman.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"polkit.service": {
"name": "polkit.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"quotaon-root.service": {
"name": "quotaon-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"quotaon@.service": {
"name": "quotaon@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"raid-check.service": {
"name": "raid-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rc-local.service": {
"name": "rc-local.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rescue.service": {
"name": "rescue.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"restraintd.service": {
"name": "restraintd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rngd.service": {
"name": "rngd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"rpc-gssd.service": {
"name": "rpc-gssd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd-notify.service": {
"name": "rpc-statd-notify.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-statd.service": {
"name": "rpc-statd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"rpc-svcgssd.service": {
"name": "rpc-svcgssd.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"rpcbind.service": {
"name": "rpcbind.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"rpmdb-migrate.service": {
"name": "rpmdb-migrate.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"rpmdb-rebuild.service": {
"name": "rpmdb-rebuild.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"selinux-autorelabel-mark.service": {
"name": "selinux-autorelabel-mark.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"selinux-autorelabel.service": {
"name": "selinux-autorelabel.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"selinux-check-proper-disable.service": {
"name": "selinux-check-proper-disable.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"serial-getty@.service": {
"name": "serial-getty@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"serial-getty@ttyS0.service": {
"name": "serial-getty@ttyS0.service",
"source": "systemd",
"state": "running",
"status": "active"
},
"sntp.service": {
"name": "sntp.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"ssh-host-keys-migration.service": {
"name": "ssh-host-keys-migration.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"sshd-keygen.service": {
"name": "sshd-keygen.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"sshd-keygen@.service": {
"name": "sshd-keygen@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"sshd-keygen@ecdsa.service": {
"name": "sshd-keygen@ecdsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@ed25519.service": {
"name": "sshd-keygen@ed25519.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-keygen@rsa.service": {
"name": "sshd-keygen@rsa.service",
"source": "systemd",
"state": "stopped",
"status": "inactive"
},
"sshd-unix-local@.service": {
"name": "sshd-unix-local@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd-vsock@.service": {
"name": "sshd-vsock@.service",
"source": "systemd",
"state": "unknown",
"status": "alias"
},
"sshd.service": {
"name": "sshd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"sshd@.service": {
"name": "sshd@.service",
"source": "systemd",
"state": "unknown",
"status": "indirect"
},
"sssd-autofs.service": {
"name": "sssd-autofs.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-kcm.service": {
"name": "sssd-kcm.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"sssd-nss.service": {
"name": "sssd-nss.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pac.service": {
"name": "sssd-pac.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-pam.service": {
"name": "sssd-pam.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-ssh.service": {
"name": "sssd-ssh.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd-sudo.service": {
"name": "sssd-sudo.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"sssd.service": {
"name": "sssd.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"syslog.service": {
"name": "syslog.service",
"source": "systemd",
"state": "stopped",
"status": "not-found"
},
"system-update-cleanup.service": {
"name": "system-update-cleanup.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-ask-password-console.service": {
"name": "systemd-ask-password-console.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-plymouth.service": {
"name": "systemd-ask-password-plymouth.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-ask-password-wall.service": {
"name": "systemd-ask-password-wall.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-backlight@.service": {
"name": "systemd-backlight@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-battery-check.service": {
"name": "systemd-battery-check.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-binfmt.service": {
"name": "systemd-binfmt.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-bless-boot.service": {
"name": "systemd-bless-boot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-boot-check-no-failures.service": {
"name": "systemd-boot-check-no-failures.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-boot-random-seed.service": {
"name": "systemd-boot-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-boot-update.service": {
"name": "systemd-boot-update.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-bootctl@.service": {
"name": "systemd-bootctl@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-bsod.service": {
"name": "systemd-bsod.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-confext.service": {
"name": "systemd-confext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-coredump@.service": {
"name": "systemd-coredump@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-creds@.service": {
"name": "systemd-creds@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-exit.service": {
"name": "systemd-exit.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-firstboot.service": {
"name": "systemd-firstboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-fsck-root.service": {
"name": "systemd-fsck-root.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-fsck@.service": {
"name": "systemd-fsck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-growfs-root.service": {
"name": "systemd-growfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-growfs@.service": {
"name": "systemd-growfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-halt.service": {
"name": "systemd-halt.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-hibernate-clear.service": {
"name": "systemd-hibernate-clear.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate-resume.service": {
"name": "systemd-hibernate-resume.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hibernate.service": {
"name": "systemd-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-homed-activate.service": {
"name": "systemd-homed-activate.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-homed-firstboot.service": {
"name": "systemd-homed-firstboot.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-homed.service": {
"name": "systemd-homed.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-hostnamed.service": {
"name": "systemd-hostnamed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hwdb-update.service": {
"name": "systemd-hwdb-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-hybrid-sleep.service": {
"name": "systemd-hybrid-sleep.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-initctl.service": {
"name": "systemd-initctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-catalog-update.service": {
"name": "systemd-journal-catalog-update.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journal-flush.service": {
"name": "systemd-journal-flush.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-journald-sync@.service": {
"name": "systemd-journald-sync@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-journald.service": {
"name": "systemd-journald.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-journald@.service": {
"name": "systemd-journald@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-kexec.service": {
"name": "systemd-kexec.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-localed.service": {
"name": "systemd-localed.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-logind.service": {
"name": "systemd-logind.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-machine-id-commit.service": {
"name": "systemd-machine-id-commit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-modules-load.service": {
"name": "systemd-modules-load.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-mountfsd.service": {
"name": "systemd-mountfsd.service",
"source": "systemd",
"state": "stopped",
"status": "indirect"
},
"systemd-network-generator.service": {
"name": "systemd-network-generator.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-networkd-persistent-storage.service": {
"name": "systemd-networkd-persistent-storage.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-networkd-wait-online.service": {
"name": "systemd-networkd-wait-online.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-networkd-wait-online@.service": {
"name": "systemd-networkd-wait-online@.service",
"source": "systemd",
"state": "unknown",
"status": "disabled"
},
"systemd-networkd.service": {
"name": "systemd-networkd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-nsresourced.service": {
"name": "systemd-nsresourced.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-oomd.service": {
"name": "systemd-oomd.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-pcrextend@.service": {
"name": "systemd-pcrextend@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrfs-root.service": {
"name": "systemd-pcrfs-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pcrfs@.service": {
"name": "systemd-pcrfs@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrlock-file-system.service": {
"name": "systemd-pcrlock-file-system.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-code.service": {
"name": "systemd-pcrlock-firmware-code.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-firmware-config.service": {
"name": "systemd-pcrlock-firmware-config.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-machine-id.service": {
"name": "systemd-pcrlock-machine-id.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-make-policy.service": {
"name": "systemd-pcrlock-make-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-authority.service": {
"name": "systemd-pcrlock-secureboot-authority.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock-secureboot-policy.service": {
"name": "systemd-pcrlock-secureboot-policy.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-pcrlock@.service": {
"name": "systemd-pcrlock@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-pcrmachine.service": {
"name": "systemd-pcrmachine.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-initrd.service": {
"name": "systemd-pcrphase-initrd.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase-sysinit.service": {
"name": "systemd-pcrphase-sysinit.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-pcrphase.service": {
"name": "systemd-pcrphase.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-portabled.service": {
"name": "systemd-portabled.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-poweroff.service": {
"name": "systemd-poweroff.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-pstore.service": {
"name": "systemd-pstore.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-quotacheck-root.service": {
"name": "systemd-quotacheck-root.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-quotacheck@.service": {
"name": "systemd-quotacheck@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-random-seed.service": {
"name": "systemd-random-seed.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-reboot.service": {
"name": "systemd-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-remount-fs.service": {
"name": "systemd-remount-fs.service",
"source": "systemd",
"state": "stopped",
"status": "enabled-runtime"
},
"systemd-repart.service": {
"name": "systemd-repart.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-resolved.service": {
"name": "systemd-resolved.service",
"source": "systemd",
"state": "running",
"status": "enabled"
},
"systemd-rfkill.service": {
"name": "systemd-rfkill.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-soft-reboot.service": {
"name": "systemd-soft-reboot.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-storagetm.service": {
"name": "systemd-storagetm.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend-then-hibernate.service": {
"name": "systemd-suspend-then-hibernate.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-suspend.service": {
"name": "systemd-suspend.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-sysctl.service": {
"name": "systemd-sysctl.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-sysext.service": {
"name": "systemd-sysext.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"systemd-sysext@.service": {
"name": "systemd-sysext@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-sysupdate-reboot.service": {
"name": "systemd-sysupdate-reboot.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysupdate.service": {
"name": "systemd-sysupdate.service",
"source": "systemd",
"state": "inactive",
"status": "indirect"
},
"systemd-sysusers.service": {
"name": "systemd-sysusers.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-time-wait-sync.service": {
"name": "systemd-time-wait-sync.service",
"source": "systemd",
"state": "inactive",
"status": "disabled"
},
"systemd-timedated.service": {
"name": "systemd-timedated.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-timesyncd.service": {
"name": "systemd-timesyncd.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-tmpfiles-clean.service": {
"name": "systemd-tmpfiles-clean.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev-early.service": {
"name": "systemd-tmpfiles-setup-dev-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup-dev.service": {
"name": "systemd-tmpfiles-setup-dev.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tmpfiles-setup.service": {
"name": "systemd-tmpfiles-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup-early.service": {
"name": "systemd-tpm2-setup-early.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-tpm2-setup.service": {
"name": "systemd-tpm2-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-load-credentials.service": {
"name": "systemd-udev-load-credentials.service",
"source": "systemd",
"state": "stopped",
"status": "disabled"
},
"systemd-udev-settle.service": {
"name": "systemd-udev-settle.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udev-trigger.service": {
"name": "systemd-udev-trigger.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-udevd.service": {
"name": "systemd-udevd.service",
"source": "systemd",
"state": "running",
"status": "static"
},
"systemd-update-done.service": {
"name": "systemd-update-done.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp-runlevel.service": {
"name": "systemd-update-utmp-runlevel.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-update-utmp.service": {
"name": "systemd-update-utmp.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-user-sessions.service": {
"name": "systemd-user-sessions.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-userdbd.service": {
"name": "systemd-userdbd.service",
"source": "systemd",
"state": "running",
"status": "indirect"
},
"systemd-vconsole-setup.service": {
"name": "systemd-vconsole-setup.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"systemd-volatile-root.service": {
"name": "systemd-volatile-root.service",
"source": "systemd",
"state": "inactive",
"status": "static"
},
"systemd-zram-setup@.service": {
"name": "systemd-zram-setup@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"systemd-zram-setup@zram0.service": {
"name": "systemd-zram-setup@zram0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"udisks2.service": {
"name": "udisks2.service",
"source": "systemd",
"state": "stopped",
"status": "enabled"
},
"unbound-anchor.service": {
"name": "unbound-anchor.service",
"source": "systemd",
"state": "stopped",
"status": "static"
},
"user-runtime-dir@.service": {
"name": "user-runtime-dir@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user-runtime-dir@0.service": {
"name": "user-runtime-dir@0.service",
"source": "systemd",
"state": "stopped",
"status": "active"
},
"user@.service": {
"name": "user@.service",
"source": "systemd",
"state": "unknown",
"status": "static"
},
"user@0.service": {
"name": "user@0.service",
"source": "systemd",
"state": "running",
"status": "active"
}
}
},
"changed": false
}
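(Annotation: the service map ending above is the tail of an ansible.builtin.service_facts snapshot; each entry carries the same name/state/status/source keys seen throughout. The role gathers this map so it can reason about which systemd units exist before tearing quadlet units down. A minimal sketch of gathering and filtering such a map — the "quadlet" filter here is illustrative, not taken from the role:

  - name: Gather the systemd service map (sketch)
    ansible.builtin.service_facts:

  - name: Show any quadlet-related services (illustrative filter)
    ansible.builtin.debug:
      msg: "{{ ansible_facts.services | dict2items
               | selectattr('key', 'search', 'quadlet') | list }}"
)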
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116
Tuesday 03 December 2024 18:29:16 -0500 (0:00:03.161) 0:03:25.192 ******
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
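(Annotation: the skip above is the role's create/update guard: false_condition shows the task only runs when __podman_state != "absent", and this cleanup pass is removing the quadlets, so creation is conditioned away. A hedged sketch of that kind of guard — the task body and the __podman_quadlet_files variable are hypothetical, only the condition comes from the log:

  - name: Create and update quadlets (illustrative guard only)
    ansible.builtin.copy:
      src: "{{ item }}"                                  # hypothetical spec file
      dest: "/etc/containers/systemd/{{ item | basename }}"
    loop: "{{ __podman_quadlet_files | default([]) }}"   # hypothetical variable
    when: __podman_state != "absent"                     # condition from the skip above
)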
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196
Tuesday 03 December 2024 18:29:16 -0500 (0:00:00.041) 0:03:25.233 ******
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202
Tuesday 03 December 2024 18:29:16 -0500 (0:00:00.036) 0:03:25.270 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211
Tuesday 03 December 2024 18:29:16 -0500 (0:00:00.037) 0:03:25.307 ******
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
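(Annotation: both "absent" handlers above are censored because the role runs them with no_log: true — they may touch registry credentials, so Ansible replaces the entire result with the "output has been hidden" placeholder. A minimal illustration of the mechanism; the path is hypothetical:

  - name: Handle credential files - absent (sketch)
    ansible.builtin.file:
      path: /root/.config/containers/auth.json   # hypothetical credential file
      state: absent
    no_log: true   # result is replaced by the "output has been hidden" message
)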
TASK [Ensure no resources] *****************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188
Tuesday 03 December 2024 18:29:17 -0500 (0:00:00.063) 0:03:25.371 ******
fatal: [managed-node1]: FAILED! => {
"assertion": "__podman_test_debug_images.stdout == \"\"",
"changed": false,
"evaluated_to": false
}
MSG:
Assertion failed
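(Annotation: the failure above is an ansible.builtin.assert on a registered command result: the cleanup check requires the image list to be empty, and evaluated_to: false means leftover images survived the teardown. A sketch of the check, assuming the register is filled by "podman images -n" — that command does appear in the journal below; the assertion string is taken verbatim from the log:

  - name: List remaining images
    ansible.builtin.command: podman images -n
    register: __podman_test_debug_images
    changed_when: false

  - name: Ensure no resources
    ansible.builtin.assert:
      that:
        - __podman_test_debug_images.stdout == ""
)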
TASK [Debug] *******************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199
Tuesday 03 December 2024 18:29:17 -0500 (0:00:00.057) 0:03:25.429 ******
ok: [managed-node1] => {
"changed": false,
"cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n",
"delta": "0:00:00.670048",
"end": "2024-12-03 18:29:18.100920",
"rc": 0,
"start": "2024-12-03 18:29:17.430872"
}
STDERR:
+ set -o pipefail
+ systemctl list-units --plain -l --all
+ grep quadlet
+ :
+ systemctl list-unit-files --all
+ grep quadlet
+ :
+ systemctl list-units --plain --failed -l --all
+ grep quadlet
+ :
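(Annotation: in the trace above, each "grep quadlet" exits non-zero because nothing matches; the trailing "|| :" converts that to success so the debug step cannot fail even with "set -o pipefail" in force, and the empty output confirms systemd no longer knows about any quadlet units. The same script from the cmd field, re-expressed as a task for readability:

  - name: Debug (equivalent shell task)
    ansible.builtin.shell: |
      exec 1>&2
      set -x
      set -o pipefail
      systemctl list-units --plain -l --all | grep quadlet || :
      systemctl list-unit-files --all | grep quadlet || :
      systemctl list-units --plain --failed -l --all | grep quadlet || :
)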
TASK [Get journald] ************************************************************
task path: /tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209
Tuesday 03 December 2024 18:29:18 -0500 (0:00:01.131) 0:03:26.560 ******
fatal: [managed-node1]: FAILED! => {
"changed": false,
"cmd": [
"journalctl",
"-ex"
],
"delta": "0:00:00.024275",
"end": "2024-12-03 18:29:18.593311",
"failed_when_result": true,
"rc": 0,
"start": "2024-12-03 18:29:18.569036"
}
STDOUT:
Dec 03 18:28:54 managed-node1 audit[89162]: CRYPTO_KEY_USER pid=89162 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89162 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[89192]: CRYPTO_KEY_USER pid=89192 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89192 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[89216]: CRYPTO_KEY_USER pid=89216 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89216 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:54 managed-node1 audit[89241]: CRYPTO_KEY_USER pid=89241 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89241 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:54 managed-node1 python3[89267]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[89268]: CRYPTO_KEY_USER pid=89268 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89268 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[89293]: CRYPTO_KEY_USER pid=89293 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89293 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[89317]: CRYPTO_KEY_USER pid=89317 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89317 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[89347]: CRYPTO_KEY_USER pid=89347 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89347 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[89371]: CRYPTO_KEY_USER pid=89371 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89371 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:54 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:54 managed-node1 audit[89396]: CRYPTO_KEY_USER pid=89396 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89396 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:55 managed-node1 python3[89422]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Dec 03 18:28:55 managed-node1 systemd[1]: Reload requested from client PID 89423 ('systemctl') (unit session-7.scope)...
Dec 03 18:28:55 managed-node1 systemd[1]: Reloading...
Dec 03 18:28:55 managed-node1 systemd[1]: Reloading finished in 271 ms.
Dec 03 18:28:55 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[89477]: CRYPTO_KEY_USER pid=89477 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89477 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[89502]: CRYPTO_KEY_USER pid=89502 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89502 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[89526]: CRYPTO_KEY_USER pid=89526 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89526 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[89556]: CRYPTO_KEY_USER pid=89556 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89556 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[89580]: CRYPTO_KEY_USER pid=89580 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89580 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:55 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:55 managed-node1 audit[89605]: CRYPTO_KEY_USER pid=89605 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89605 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:56 managed-node1 podman[89632]: 2024-12-03 18:28:56.156135163 -0500 EST m=+0.050665354 volume remove systemd-quadlet-demo-mysql
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[89640]: CRYPTO_KEY_USER pid=89640 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89640 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[89665]: CRYPTO_KEY_USER pid=89665 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89665 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[89689]: CRYPTO_KEY_USER pid=89689 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89689 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[89719]: CRYPTO_KEY_USER pid=89719 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89719 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[89743]: CRYPTO_KEY_USER pid=89743 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89743 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:56 managed-node1 audit[89768]: CRYPTO_KEY_USER pid=89768 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89768 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:56 managed-node1 python3[89794]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[89802]: CRYPTO_KEY_USER pid=89802 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89802 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:56 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[89827]: CRYPTO_KEY_USER pid=89827 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89827 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[89851]: CRYPTO_KEY_USER pid=89851 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89851 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[89881]: CRYPTO_KEY_USER pid=89881 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89881 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[89905]: CRYPTO_KEY_USER pid=89905 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89905 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:57 managed-node1 audit[89930]: CRYPTO_KEY_USER pid=89930 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89930 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:57 managed-node1 python3[89956]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[89964]: CRYPTO_KEY_USER pid=89964 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89964 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[89989]: CRYPTO_KEY_USER pid=89989 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=89989 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[90013]: CRYPTO_KEY_USER pid=90013 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90013 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[90043]: CRYPTO_KEY_USER pid=90043 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90043 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[90067]: CRYPTO_KEY_USER pid=90067 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90067 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:57 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:57 managed-node1 audit[90092]: CRYPTO_KEY_USER pid=90092 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90092 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:58 managed-node1 python3[90118]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90126]: CRYPTO_KEY_USER pid=90126 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90126 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90151]: CRYPTO_KEY_USER pid=90151 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90151 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90175]: CRYPTO_KEY_USER pid=90175 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90175 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90205]: CRYPTO_KEY_USER pid=90205 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90205 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90229]: CRYPTO_KEY_USER pid=90229 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90229 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[90254]: CRYPTO_KEY_USER pid=90254 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90254 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:58 managed-node1 python3[90280]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90289]: CRYPTO_KEY_USER pid=90289 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90289 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90314]: CRYPTO_KEY_USER pid=90314 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90314 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90338]: CRYPTO_KEY_USER pid=90338 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90338 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90368]: CRYPTO_KEY_USER pid=90368 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90368 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[90392]: CRYPTO_KEY_USER pid=90392 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90392 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:58 managed-node1 audit[90418]: CRYPTO_KEY_USER pid=90418 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90418 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:58 managed-node1 python3[90444]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90452]: CRYPTO_KEY_USER pid=90452 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90452 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90477]: CRYPTO_KEY_USER pid=90477 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90477 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90501]: CRYPTO_KEY_USER pid=90501 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90501 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90531]: CRYPTO_KEY_USER pid=90531 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90531 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90555]: CRYPTO_KEY_USER pid=90555 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90555 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[90580]: CRYPTO_KEY_USER pid=90580 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90580 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90614]: CRYPTO_KEY_USER pid=90614 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90614 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90639]: CRYPTO_KEY_USER pid=90639 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90639 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90663]: CRYPTO_KEY_USER pid=90663 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90663 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90693]: CRYPTO_KEY_USER pid=90693 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90693 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[90717]: CRYPTO_KEY_USER pid=90717 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90717 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:28:59 managed-node1 audit[90742]: CRYPTO_KEY_USER pid=90742 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90742 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[90777]: CRYPTO_KEY_USER pid=90777 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90777 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[90802]: CRYPTO_KEY_USER pid=90802 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90802 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[90826]: CRYPTO_KEY_USER pid=90826 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90826 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[90856]: CRYPTO_KEY_USER pid=90856 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90856 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[90880]: CRYPTO_KEY_USER pid=90880 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90880 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:00 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:00 managed-node1 audit[90905]: CRYPTO_KEY_USER pid=90905 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=90905 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:00 managed-node1 python3[90931]: ansible-service_facts Invoked
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1606 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1596 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1607 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1608 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1597 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1598 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1609 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1592 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1610 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1599 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1611 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1589 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1612 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1613 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1590 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1591 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1614 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1593 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1615 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1594 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1616 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1602 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1617 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1618 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1603 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1604 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1619 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1620 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1600 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1601 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1621 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1595 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1622 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1605 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1623 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1583 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1624 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1625 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1584 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1585 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1626 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1586 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1627 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1628 op=LOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1587 op=UNLOAD
Dec 03 18:29:01 managed-node1 audit: BPF prog-id=1588 op=UNLOAD
Dec 03 18:29:01 managed-node1 systemd[1]: /usr/lib/systemd/system/lvm-devices-import.service:8: Unknown key 'ConditionPathExists' in section [Service], ignoring.
Dec 03 18:29:03 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:03 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:03 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:03 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:03 managed-node1 audit[91068]: CRYPTO_KEY_USER pid=91068 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91068 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:03 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:03 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[91093]: CRYPTO_KEY_USER pid=91093 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91093 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[91117]: CRYPTO_KEY_USER pid=91117 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91117 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[91147]: CRYPTO_KEY_USER pid=91147 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91147 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[91171]: CRYPTO_KEY_USER pid=91171 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91171 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:04 managed-node1 audit[91196]: CRYPTO_KEY_USER pid=91196 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91196 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:04 managed-node1 python3[91222]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Dec 03 18:29:04 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[91225]: CRYPTO_KEY_USER pid=91225 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91225 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:04 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[91250]: CRYPTO_KEY_USER pid=91250 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91250 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[91274]: CRYPTO_KEY_USER pid=91274 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91274 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[91304]: CRYPTO_KEY_USER pid=91304 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91304 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[91328]: CRYPTO_KEY_USER pid=91328 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91328 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:06 managed-node1 audit[91353]: CRYPTO_KEY_USER pid=91353 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91353 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:06 managed-node1 python3[91379]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
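(The module arguments logged above map directly onto a systemd task; a hedged reconstruction with the task name assumed and the arguments copied from the invocation:)

    - name: Stop and disable quadlet-demo-network.service   # illustrative name
      ansible.builtin.systemd_service:
        name: quadlet-demo-network.service
        scope: system
        state: stopped
        enabled: false
        force: true       # override/remove existing enablement symlinks, matching force=True in the log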
Dec 03 18:29:06 managed-node1 systemd[1]: Reload requested from client PID 91382 ('systemctl') (unit session-7.scope)...
Dec 03 18:29:06 managed-node1 systemd[1]: Reloading...
Dec 03 18:29:06 managed-node1 systemd[1]: Reloading finished in 268 ms.
Dec 03 18:29:06 managed-node1 systemd[1]: quadlet-demo-network.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel
░░
░░ The unit quadlet-demo-network.service has successfully entered the 'dead' state.
Dec 03 18:29:06 managed-node1 audit[1]: SERVICE_STOP pid=1 uid=0 auid=4294967295 ses=4294967295 subj=system_u:system_r:init_t:s0 msg='unit=quadlet-demo-network comm="systemd" exe="/usr/lib/systemd/systemd" hostname=? addr=? terminal=? res=success'
Dec 03 18:29:06 managed-node1 systemd[1]: Stopped quadlet-demo-network.service.
░░ Subject: A stop job for unit quadlet-demo-network.service has finished
░░ Defined-By: systemd
░░ Support: https://lists.freedesktop.org/mailman/listinfo/systemd-devel
░░
░░ A stop job for unit quadlet-demo-network.service has finished.
░░
░░ The job identifier is 8045 and the job result is done.
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1629 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1616 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1630 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1631 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1617 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1618 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1632 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1626 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1633 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1634 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1627 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1628 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1635 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1621 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1636 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1611 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1637 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1638 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1612 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1613 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1639 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1623 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1640 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1641 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1624 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1625 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1642 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1610 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1643 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1606 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1644 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1645 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1607 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1608 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1646 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1609 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1647 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1614 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1648 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1649 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1619 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1620 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1650 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1615 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1651 op=LOAD
Dec 03 18:29:06 managed-node1 audit: BPF prog-id=1622 op=UNLOAD
Dec 03 18:29:06 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[91438]: CRYPTO_KEY_USER pid=91438 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91438 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:06 managed-node1 audit[91463]: CRYPTO_KEY_USER pid=91463 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91463 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91487]: CRYPTO_KEY_USER pid=91487 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91487 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91517]: CRYPTO_KEY_USER pid=91517 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91517 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91541]: CRYPTO_KEY_USER pid=91541 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91541 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[91566]: CRYPTO_KEY_USER pid=91566 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91566 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:07 managed-node1 python3[91592]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
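(A sketch of the stat check behind this entry. The follow=False, get_checksum=True, get_mime=True, get_attributes=True and checksum_algorithm=sha1 values in the log are the stat module's defaults, so a minimal task suffices; the task and register names are assumptions:)

    - name: Check if the quadlet file still exists          # illustrative name
      ansible.builtin.stat:
        path: /etc/containers/systemd/quadlet-demo.network  # path taken from the log line
      register: quadlet_file_stat                           # register name is an assumption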
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91595]: CRYPTO_KEY_USER pid=91595 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91595 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91620]: CRYPTO_KEY_USER pid=91620 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91620 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91644]: CRYPTO_KEY_USER pid=91644 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91644 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91674]: CRYPTO_KEY_USER pid=91674 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91674 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91698]: CRYPTO_KEY_USER pid=91698 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91698 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[91723]: CRYPTO_KEY_USER pid=91723 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91723 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[91750]: CRYPTO_KEY_USER pid=91750 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91750 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:07 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91775]: CRYPTO_KEY_USER pid=91775 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91775 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91799]: CRYPTO_KEY_USER pid=91799 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91799 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91829]: CRYPTO_KEY_USER pid=91829 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91829 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91853]: CRYPTO_KEY_USER pid=91853 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91853 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:08 managed-node1 audit[91878]: CRYPTO_KEY_USER pid=91878 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91878 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:08 managed-node1 python3[91904]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
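(The long argument list above is the file module's defaults plus the two values that matter, path and state; reconstructed as a task with an assumed name:)

    - name: Remove the quadlet network file   # illustrative name
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo.network
        state: absent                         # deletes the file; idempotent if it is already gone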
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91905]: CRYPTO_KEY_USER pid=91905 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91905 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91930]: CRYPTO_KEY_USER pid=91930 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91930 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91954]: CRYPTO_KEY_USER pid=91954 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91954 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[91984]: CRYPTO_KEY_USER pid=91984 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=91984 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[92008]: CRYPTO_KEY_USER pid=92008 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92008 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:08 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:08 managed-node1 audit[92033]: CRYPTO_KEY_USER pid=92033 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92033 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:09 managed-node1 python3[92059]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
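(This invocation carries only daemon_reload=True and scope=system; a sketch of the corresponding cleanup task, with the name assumed:)

    - name: Reload systemd after removing unit files   # illustrative name
      ansible.builtin.systemd_service:
        daemon_reload: true
        scope: system       # system-wide manager, matching the logged scope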
Dec 03 18:29:09 managed-node1 systemd[1]: Reload requested from client PID 92060 ('systemctl') (unit session-7.scope)...
Dec 03 18:29:09 managed-node1 systemd[1]: Reloading...
Dec 03 18:29:09 managed-node1 systemd[1]: Reloading finished in 269 ms.
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[92114]: CRYPTO_KEY_USER pid=92114 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92114 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[92139]: CRYPTO_KEY_USER pid=92139 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92139 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[92163]: CRYPTO_KEY_USER pid=92163 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92163 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[92193]: CRYPTO_KEY_USER pid=92193 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92193 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[92217]: CRYPTO_KEY_USER pid=92217 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92217 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:09 managed-node1 audit[92242]: CRYPTO_KEY_USER pid=92242 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92242 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[92277]: CRYPTO_KEY_USER pid=92277 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92277 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:09 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92302]: CRYPTO_KEY_USER pid=92302 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92302 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92326]: CRYPTO_KEY_USER pid=92326 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92326 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92356]: CRYPTO_KEY_USER pid=92356 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92356 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92380]: CRYPTO_KEY_USER pid=92380 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92380 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:10 managed-node1 audit[92405]: CRYPTO_KEY_USER pid=92405 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92405 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:10 managed-node1 python3[92431]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92438]: CRYPTO_KEY_USER pid=92438 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92438 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92463]: CRYPTO_KEY_USER pid=92463 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92463 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92487]: CRYPTO_KEY_USER pid=92487 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92487 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92517]: CRYPTO_KEY_USER pid=92517 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92517 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[92541]: CRYPTO_KEY_USER pid=92541 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92541 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:10 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:10 managed-node1 audit[92566]: CRYPTO_KEY_USER pid=92566 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92566 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:11 managed-node1 python3[92592]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92600]: CRYPTO_KEY_USER pid=92600 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92600 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92625]: CRYPTO_KEY_USER pid=92625 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92625 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92649]: CRYPTO_KEY_USER pid=92649 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92649 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92679]: CRYPTO_KEY_USER pid=92679 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92679 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92703]: CRYPTO_KEY_USER pid=92703 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92703 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[92728]: CRYPTO_KEY_USER pid=92728 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92728 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:11 managed-node1 python3[92754]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92762]: CRYPTO_KEY_USER pid=92762 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92762 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92787]: CRYPTO_KEY_USER pid=92787 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92787 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92811]: CRYPTO_KEY_USER pid=92811 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92811 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92841]: CRYPTO_KEY_USER pid=92841 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92841 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[92865]: CRYPTO_KEY_USER pid=92865 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92865 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:11 managed-node1 audit[92890]: CRYPTO_KEY_USER pid=92890 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92890 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:12 managed-node1 python3[92916]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[92925]: CRYPTO_KEY_USER pid=92925 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92925 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[92950]: CRYPTO_KEY_USER pid=92950 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92950 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[92974]: CRYPTO_KEY_USER pid=92974 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=92974 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[93004]: CRYPTO_KEY_USER pid=93004 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93004 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[93028]: CRYPTO_KEY_USER pid=93028 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93028 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[93054]: CRYPTO_KEY_USER pid=93054 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93054 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:12 managed-node1 python3[93080]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[93088]: CRYPTO_KEY_USER pid=93088 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93088 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[93113]: CRYPTO_KEY_USER pid=93113 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93113 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[93137]: CRYPTO_KEY_USER pid=93137 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93137 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[93167]: CRYPTO_KEY_USER pid=93167 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93167 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[93191]: CRYPTO_KEY_USER pid=93191 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93191 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:12 managed-node1 audit[93216]: CRYPTO_KEY_USER pid=93216 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93216 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93250]: CRYPTO_KEY_USER pid=93250 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93250 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93275]: CRYPTO_KEY_USER pid=93275 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93275 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93299]: CRYPTO_KEY_USER pid=93299 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93299 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93329]: CRYPTO_KEY_USER pid=93329 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93329 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93353]: CRYPTO_KEY_USER pid=93353 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93353 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[93378]: CRYPTO_KEY_USER pid=93378 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93378 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93413]: CRYPTO_KEY_USER pid=93413 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93413 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93438]: CRYPTO_KEY_USER pid=93438 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93438 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93462]: CRYPTO_KEY_USER pid=93462 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93462 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93492]: CRYPTO_KEY_USER pid=93492 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93492 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[93516]: CRYPTO_KEY_USER pid=93516 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93516 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:13 managed-node1 audit[93541]: CRYPTO_KEY_USER pid=93541 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93541 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:14 managed-node1 python3[93567]: ansible-service_facts Invoked
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1652 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1639 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1653 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1654 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1640 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1641 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1655 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1656 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1648 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1649 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1657 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1632 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1658 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1659 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1633 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1634 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1660 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1636 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1661 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1662 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1637 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1638 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1663 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1635 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1664 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1643 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1665 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1666 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1644 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1645 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1667 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1629 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1668 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1669 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1630 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1631 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1670 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1650 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1671 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1651 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1672 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1647 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1673 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1646 op=UNLOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1674 op=LOAD
Dec 03 18:29:14 managed-node1 audit: BPF prog-id=1642 op=UNLOAD
Dec 03 18:29:15 managed-node1 systemd[1]: /usr/lib/systemd/system/lvm-devices-import.service:8: Unknown key 'ConditionPathExists' in section [Service], ignoring.
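The "Unknown key" warning above is benign but real: Condition*= settings such as ConditionPathExists are [Unit]-section directives per systemd.unit(5), so systemd ignores the key when it appears under [Service] and the intended condition never applies. A hedged sketch of restoring it via a drop-in rather than editing the packaged unit (run as root; the ConditionPathExists value below is an assumption based on the commonly reported lvm2 packaging bug, so copy the real value from line 8 of the installed file first):

    # Show the misplaced key the warning points at (line 8 of the unit):
    sed -n '1,12p' /usr/lib/systemd/system/lvm-devices-import.service

    # Illustrative drop-in: re-declare the condition in the correct section.
    # The path below is an assumption; use the value printed above.
    mkdir -p /etc/systemd/system/lvm-devices-import.service.d
    cat > /etc/systemd/system/lvm-devices-import.service.d/condition.conf <<'EOF'
    [Unit]
    ConditionPathExists=!/etc/lvm/devices/system.devices
    EOF
    systemctl daemon-reload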
Dec 03 18:29:16 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:16 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:16 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:16 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:16 managed-node1 audit[93704]: CRYPTO_KEY_USER pid=93704 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93704 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:16 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:16 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[93729]: CRYPTO_KEY_USER pid=93729 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93729 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[93753]: CRYPTO_KEY_USER pid=93753 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93753 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[93783]: CRYPTO_KEY_USER pid=93783 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93783 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[93807]: CRYPTO_KEY_USER pid=93807 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93807 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:17 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:17 managed-node1 audit[93832]: CRYPTO_KEY_USER pid=93832 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93832 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:17 managed-node1 python3[93858]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2
set -x
set -o pipefail
systemctl list-units --plain -l --all | grep quadlet || :
systemctl list-unit-files --all | grep quadlet || :
systemctl list-units --plain --failed -l --all | grep quadlet || :
_uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
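The invocation above is a test diagnostic: three systemctl listings filtered for anything quadlet-related, each followed by '|| :' so that grep's non-zero exit on no match does not fail the task. A standalone version for rerunning the same check by hand (a sketch; assumes a root shell on a systemd host):

    #!/bin/bash
    # List quadlet-related units, unit files, and failed units.
    # grep exits 1 when nothing matches, so '|| :' forces success.
    set -x
    set -o pipefail
    systemctl list-units --plain -l --all | grep quadlet || :
    systemctl list-unit-files --all | grep quadlet || :
    systemctl list-units --plain --failed -l --all | grep quadlet || :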
Dec 03 18:29:18 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=? terminal=/dev/pts/0 res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[93866]: CRYPTO_KEY_USER pid=93866 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93866 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[93891]: CRYPTO_KEY_USER pid=93891 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93891 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[93915]: CRYPTO_KEY_USER pid=93915 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93915 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[93945]: CRYPTO_KEY_USER pid=93945 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93945 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[93969]: CRYPTO_KEY_USER pid=93969 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93969 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=? res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_END pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGOUT pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=ssh res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_LOGIN pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:18 managed-node1 audit[62185]: USER_START pid=62185 uid=0 auid=0 ses=7 subj=system_u:system_r:sshd_t:s0-s0:c0.c1023 msg='op=login id=0 exe="/usr/libexec/openssh/sshd-session" hostname=? addr=10.31.13.170 terminal=/dev/pts/0 res=success'
Dec 03 18:29:18 managed-node1 audit[93994]: CRYPTO_KEY_USER pid=93994 uid=0 auid=0 ses=7 subj=unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023 msg='op=destroy kind=server fp=SHA256:08:c7:11:be:cf:39:f0:f9:0a:5f:42:e6:37:b5:97:a7:45:c0:af:f3:50:26:4e:c1:81:85:80:cb:3d:5b:b0:68 direction=? spid=93994 suid=0 exe="/usr/libexec/openssh/sshd-session" hostname=managed-node1 addr=10.31.13.170 terminal=pts/0 res=success'
Dec 03 18:29:18 managed-node1 python3[94020]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
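The task above captures the full journal with 'journalctl -ex', which is why everything from sshd audit events to BPF program loads appears in this log. When reproducing by hand it is usually easier to scope the query; a sketch, where the unit name is a hypothetical example and not taken from this log:

    # -x appends explanatory help texts; --no-pager suits capture to a file.
    journalctl -x --no-pager -u quadlet-demo.service --since '18:25:00'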
PLAY RECAP *********************************************************************
managed-node1 : ok=399 changed=44 unreachable=0 failed=1 skipped=377 rescued=1 ignored=0
Tuesday 03 December 2024 18:29:18 -0500 (0:00:00.462) 0:03:27.023 ******
===============================================================================
fedora.linux_system_roles.podman : Ensure container images are present -- 22.88s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 8.27s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.56s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.39s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.27s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.27s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.21s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 3.16s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.76s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.74s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.62s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.54s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.firewall : Install firewalld ------------------ 1.50s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.43s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
fedora.linux_system_roles.firewall : Install firewalld ------------------ 1.40s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.38s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 1.38s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 1.36s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.podman : Start service ------------------------ 1.34s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.podman : Reload systemctl --------------------- 1.34s
/tmp/collections-mM1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82
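The summary above is the per-task timing report from the profile_tasks stdout callback: cumulative wall-clock runtime (0:03:27.023) followed by the slowest tasks, each with the file and line where the task is defined. A minimal sketch of enabling the same report for other runs, assuming the ansible.posix collection is installed:

    # Write a local ansible.cfg that turns on the profiling callback.
    cat > ansible.cfg <<'EOF'
    [defaults]
    callbacks_enabled = ansible.posix.profile_tasks
    EOF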