ansible-playbook [core 2.16.14]
config file = None
configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
ansible collection location = /tmp/collections-pTD
executable location = /usr/local/bin/ansible-playbook
python version = 3.12.1 (main, Feb 21 2024, 14:18:26) [GCC 8.5.0 20210514 (Red Hat 8.5.0-21)] (/usr/bin/python3.12)
jinja version = 3.1.6
libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml
PLAY [all] *********************************************************************
TASK [Include vault variables] *************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Monday 07 July 2025 20:15:39 -0400 (0:00:00.026) 0:00:00.026 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_test_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
},
"mysql_container_root_password": {
"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
}
},
"ansible_included_var_files": [
"/tmp/podman-Fwa/tests/vars/vault-variables.yml"
],
"changed": false
}
PLAY [Deploy the quadlet demo app] *********************************************
TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Monday 07 July 2025 20:15:39 -0400 (0:00:00.032) 0:00:00.059 ***********
ok: [managed-node1]
TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38
Monday 07 July 2025 20:15:41 -0400 (0:00:01.242) 0:00:01.301 ***********
skipping: [managed-node1] => {
"false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""
}
TASK [End test] ****************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45
Monday 07 July 2025 20:15:41 -0400 (0:00:00.022) 0:00:01.324 ***********
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node1] => {
"skip_reason": "end_play conditional evaluated to False, continuing play"
}
MSG:
end_play
TASK [Generate certificates] ***************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51
Monday 07 July 2025 20:15:41 -0400 (0:00:00.018) 0:00:01.342 ***********
TASK [fedora.linux_system_roles.certificate : Set version specific variables] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2
Monday 07 July 2025 20:15:41 -0400 (0:00:00.072) 0:00:01.415 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2
Monday 07 July 2025 20:15:41 -0400 (0:00:00.040) 0:00:01.455 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Check if system is ostree] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10
Monday 07 July 2025 20:15:41 -0400 (0:00:00.053) 0:00:01.508 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15
Monday 07 July 2025 20:15:41 -0400 (0:00:00.494) 0:00:02.003 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__certificate_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Run systemctl] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:22
Monday 07 July 2025 20:15:41 -0400 (0:00:00.021) 0:00:02.025 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"systemctl",
"is-system-running"
],
"delta": "0:00:00.007718",
"end": "2025-07-07 20:15:42.304427",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:15:42.296709"
}
STDOUT:
running
TASK [fedora.linux_system_roles.certificate : Require installed systemd] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:30
Monday 07 July 2025 20:15:42 -0400 (0:00:00.493) 0:00:02.518 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:35
Monday 07 July 2025 20:15:42 -0400 (0:00:00.037) 0:00:02.555 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__certificate_is_booted": true
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:40
Monday 07 July 2025 20:15:42 -0400 (0:00:00.023) 0:00:02.579 ***********
skipping: [managed-node1] => (item=RedHat.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "RedHat.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS_8.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS_8.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=CentOS_8.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS_8.yml",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Monday 07 July 2025 20:15:42 -0400 (0:00:00.037) 0:00:02.616 ***********
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15
Monday 07 July 2025 20:15:45 -0400 (0:00:03.023) 0:00:05.639 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:25
Monday 07 July 2025 20:15:48 -0400 (0:00:02.903) 0:00:08.542 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//pre-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:49
Monday 07 July 2025 20:15:48 -0400 (0:00:00.515) 0:00:09.058 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"gid": 0,
"group": "root",
"mode": "0700",
"owner": "root",
"path": "/etc/certmonger//post-scripts",
"secontext": "unconfined_u:object_r:etc_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.certificate : Ensure provider service is running] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:76
Monday 07 July 2025 20:15:49 -0400 (0:00:00.398) 0:00:09.457 ***********
ok: [managed-node1] => (item=certmonger) => {
"__certificate_provider": "certmonger",
"ansible_loop_var": "__certificate_provider",
"changed": false,
"enabled": true,
"name": "certmonger",
"state": "started",
"status": {
"ActiveEnterTimestamp": "Mon 2025-07-07 20:10:57 EDT",
"ActiveEnterTimestampMonotonic": "462557342",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "dbus.socket network.target sysinit.target system.slice dbus.service basic.target systemd-journald.socket syslog.target",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:10:57 EDT",
"AssertTimestampMonotonic": "462545282",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedorahosted.certmonger",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:10:57 EDT",
"ConditionTimestampMonotonic": "462545281",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/certmonger.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "Certificate monitoring and PKI enrollment",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "12419",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:10:57 EDT",
"ExecMainStartTimestampMonotonic": "462546241",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/certmonger.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "certmonger.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:10:57 EDT",
"InactiveExitTimestampMonotonic": "462546289",
"InvocationID": "c8e8ba10a12040ca94ff9053dd030bb4",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "12419",
"MemoryAccounting": "yes",
"MemoryCurrent": "3104768",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "certmonger.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PIDFile": "/run/certmonger.pid",
"PartOf": "dbus.service",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "system.slice dbus.socket sysinit.target",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:10:57 EDT",
"StateChangeTimestampMonotonic": "462557342",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "1",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"WatchdogTimestamp": "Mon 2025-07-07 20:10:57 EDT",
"WatchdogTimestampMonotonic": "462557340",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86
Monday 07 July 2025 20:15:50 -0400 (0:00:00.795) 0:00:10.252 ***********
changed: [managed-node1] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => {
"ansible_loop_var": "item",
"changed": true,
"item": {
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
}
MSG:
Certificate requested (new).
TASK [fedora.linux_system_roles.certificate : Check if test mode is supported] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:138
Monday 07 July 2025 20:15:51 -0400 (0:00:00.966) 0:00:11.219 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __certificate_is_booted",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143
Monday 07 July 2025 20:15:51 -0400 (0:00:00.038) 0:00:11.257 ***********
ok: [managed-node1] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUpDNW5NY052VWFSaGxaNlNKTEtFbTh3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSXprdwpZams1WTJNM0xUQmtZbVEwTmpreExUZzJOVFkzWVRRNExUa3lZMkV4TWpaa01CNFhEVEkxTURjd09EQXdNVFUxCk1Gb1hEVEkyTURjd09EQXdNVEExTjFvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFucmkyeGlHL3RkS2VMS05DS2g0UnhZSFJhNDMwdys1UQpmcVI3b2t4T0wyMXY4V1B4cCt5R2RkK3NFMlVKbHdOYmNpKzlnajBjOUEzbFI2bm1PbGFYUGE1ejNlMUFheDkrCnZuL2I1dU1wMGxKQVhzeU5UZXhCc1I0bFBXeTcwcEhnWEpLRjJoSTF0OHhZeEV3N2xaZ2owQWI4bkZsOFpmTWoKdUM0QnEreElhSDExQTgxNndCVWdFMnVNb3UydW1ieVk5cFlia0YzZjZPZVY3RnlxVTVQdjFWRGxxalFvN0NkUwpVS0QyNnltRFZNM1VQMFY5UDgvYmRSOGRhSi9MTlFSNlQ1UVpCTnVJRDhEclhpYWhEMDdNMEV6bWlWVlJ0cGZYCmdWVk9nYzgwSlZ3MmFHbzEwRk92UFZSaGZFOVdwODl5S2RMYjduM3hwZnlscFFOWEpuTXhFd0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGUHNkCms1THViQ2VHQTVGaEFBYmJNNmRZSGkvbE1COEdBMVVkSXdRWU1CYUFGRkNwZFVUK0k4T044NStqWnpHamxZSFEKSjc3cU1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQUI2clNaVUx4UjQ3VVhzMkFtVzhkWU1zN3hlQ1RENHdrSQpBYld1Mk9WY3RDUGVmeTNsZThsTjgwSlFxWkxISnRiWUU2Z0xVMEYrNnc4eGRHZmVibmJHeTNDaXdHMU1QSVgxCndLWmd0cm94UFQxK1UzM1NmVytrcHB5Mm02Y0NFMDBMa3A5M0ZSOENFVzdBR3F0eFlScVlTM2F0U1RrbGtHeWQKMjhXL3ladXV6eGo3aElVRllqUnVuWmVudE5MZGxEMk5OSjh2WktGeE50MFBLalh2ZTFpSzB1bFN5MSsvTGh5cQpPeWgyVXJaeEJHZ1NrVnIyc0ZWaGgzSUVwVCt3alRNMldkUmV2K1JueWFIN2NlTk90WGsvSmlHaXBoNS9VMUMvCjIwN2tHZHpzTk1QTThxU2RQeWJyeXVaTDdkVTZJOFBCVldFeENtTXpKWUE0QzA1emJKcHEKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"cert",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
ok: [managed-node1] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2UUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktjd2dnU2pBZ0VBQW9JQkFRQ2V1TGJHSWIrMTBwNHMKbzBJcUhoSEZnZEZyamZURDdsQitwSHVpVEU0dmJXL3hZL0duN0laMTM2d1RaUW1YQTF0eUw3MkNQUnowRGVWSApxZVk2VnBjOXJuUGQ3VUJySDM2K2Y5dm00eW5TVWtCZXpJMU43RUd4SGlVOWJMdlNrZUJja29YYUVqVzN6RmpFClREdVZtQ1BRQnZ5Y1dYeGw4eU80TGdHcjdFaG9mWFVEelhyQUZTQVRhNHlpN2E2WnZKajJsaHVRWGQvbzU1WHMKWEtwVGsrL1ZVT1dxTkNqc0oxSlFvUGJyS1lOVXpkUS9SWDAvejl0MUh4MW9uOHMxQkhwUGxCa0UyNGdQd090ZQpKcUVQVHN6UVRPYUpWVkcybDllQlZVNkJ6elFsWERab2FqWFFVNjg5VkdGOFQxYW56M0lwMHR2dWZmR2wvS1dsCkExY21jekVUQWdNQkFBRUNnZ0VBWS9vVDN1K3BNMTFzbHM5clFONVo4Vi9aU3ZkTEF4T2REbmlMQytzbW9JYzkKcVA1OFhIc0hJejUxeEcyelFiVXlwOVZjZUtvSUQ4Nms2cVFoU2dQN0ZqWko4N09iRFI3MGxnZ3h1dGpDSWlILwpPL2Q2MW1lSDVZb0lpUTF4cmF5SGQ2Z3Z1ZklZZGlLNGswVTFqd0FiSnRFUDAyZ21qSTAvcGFjTkZIOHNDT3huCmhZREdJaHJ4aVdxTzkvcDNIYXhvZFR0L1RGRFhlcUYxREI2ajg5OEM5RDBqN2Q5V3NjejhHNHV4UEo5ZEdvYjYKMVpJZ1lzQjBiS1NsZ1hoNmxUL2ZXN2ptYlFIemtKRFFzKzJkbGV5NXNzSkVIRGNjN0hBL3N4UzlES3lWaVU4VwoxbFdwNnB5S28zTytLbW0vWlZSVXZYcG9CRnBVaE54VkROY0hGWHEreVFLQmdRRE40S3ArOHJETmdHblVLdU9nCndYMTBxTGYrWVN2TVhkSWMzNDZaU2hsWEdLUFYrejQxSW9DZE1xbzd1SlhaT2hYVUh1SUJnWWs3dzdjQklWUGUKQjhnMlVBNnBFMzArM0tnSnRwM2x1YkxFbHNlKzBBRWczV2JhM1E0b0VYVzIvczJ0aEdlZXU5S1lpYXdpYW9YRwpkR1lyWkZDQUtVMEFuWGU0M1VRNnMzMUMvUUtCZ1FERlhRc2lvVWw4TUlwUkFmTVR5bjZ4WlBzRDg3ZVUwOENWCmVVSEl3MEs2czFZTVlsd0JWajdCQlR2ZHBQYlFiR3BjNkpCWU5jSmRqZkRBVGdnS0Z0MDRITkNuTkQrQlBRcTQKMVF0UlV3Y2RNZC9Ba3JTRndzRHRQZGpBaEN2b1F3a3lrSVlWUlV0RmVRaHplR2llTS83d3krYm1FYmJjajYrVgp2SUMrZnFVcFR3S0JnSEtKMUxFdCtqbEtnUlkrQ2tnTWZleWlWVU1iOG84UGYyNzYzVTNoRUxvSkJsVnRJdzNTCnRUVS8wNy9JRU80Nng1d3MxbFk2S0F4czlXcVdqUjlRdXcya2VwT01PVGo0OEMvL1BsZHBlZEN2aGs5ZFQ5TmwKK2V3MzRjRmUvVnVpNVBEdFllekkyQUJ2VDZvU0UzejJrMFQrMlY2RUc4Z0UwMit4QWwyYU1FMU5Bb0dCQUtUQQppdThabkhKalF0Tmt3VG1nTENxQmxWa3pOZWl5NjJ6YmtDSzdmUlRGWit0bW9FOHRZNjFWWlNnYzhUd0RoTGFTCkhRekZzNzdOSWxBbDAyZzR0ZXFGZzFoUXNVSjlabG5zM2tuQStZdFJldXNTakVlc3o4ZzdmSmtZdkY1aXQyYlMKWmx1VFFNWjV4cWc5dDNkNUNEU1N3ZWtFbUpWK0RLRCtmcXFHcGZEWkFvR0FCL09PN2IzRjVrK3J2WElveW1tYwpZV25LZFp6TzErZ2RyREZvbmlNSlVQOTVYNXA5V3dIUjVRZ0V6TU81OVRkMDU5Z05sUWZ5di9lNm5JZWRDbExFCnA4eXlsdDNaSEs0NGNNamZyK3hPU2xXd0tGMU5pTWtjeGRMLzBDL1QrYTVsdmY5Unc1MXRTU3gvS1ZERzF5WWMKcFkwbWlDa2hyQkR6aXNsbWhrMWZDb3M9Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K",
"encoding": "base64",
"item": [
"key",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/private/quadlet_demo.key"
}
ok: [managed-node1] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => {
"ansible_loop_var": "item",
"changed": false,
"content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnekNDQW11Z0F3SUJBZ0lSQUpDNW5NY052VWFSaGxaNlNKTEtFbTh3RFFZSktvWklodmNOQVFFTEJRQXcKVURFZ01CNEdBMVVFQXd3WFRHOWpZV3dnVTJsbmJtbHVaeUJCZFhSb2IzSnBkSGt4TERBcUJnTlZCQU1NSXprdwpZams1WTJNM0xUQmtZbVEwTmpreExUZzJOVFkzWVRRNExUa3lZMkV4TWpaa01CNFhEVEkxTURjd09EQXdNVFUxCk1Gb1hEVEkyTURjd09EQXdNVEExTjFvd0ZERVNNQkFHQTFVRUF4TUpiRzlqWVd4b2IzTjBNSUlCSWpBTkJna3EKaGtpRzl3MEJBUUVGQUFPQ0FROEFNSUlCQ2dLQ0FRRUFucmkyeGlHL3RkS2VMS05DS2g0UnhZSFJhNDMwdys1UQpmcVI3b2t4T0wyMXY4V1B4cCt5R2RkK3NFMlVKbHdOYmNpKzlnajBjOUEzbFI2bm1PbGFYUGE1ejNlMUFheDkrCnZuL2I1dU1wMGxKQVhzeU5UZXhCc1I0bFBXeTcwcEhnWEpLRjJoSTF0OHhZeEV3N2xaZ2owQWI4bkZsOFpmTWoKdUM0QnEreElhSDExQTgxNndCVWdFMnVNb3UydW1ieVk5cFlia0YzZjZPZVY3RnlxVTVQdjFWRGxxalFvN0NkUwpVS0QyNnltRFZNM1VQMFY5UDgvYmRSOGRhSi9MTlFSNlQ1UVpCTnVJRDhEclhpYWhEMDdNMEV6bWlWVlJ0cGZYCmdWVk9nYzgwSlZ3MmFHbzEwRk92UFZSaGZFOVdwODl5S2RMYjduM3hwZnlscFFOWEpuTXhFd0lEQVFBQm80R1QKTUlHUU1Bc0dBMVVkRHdRRUF3SUZvREFVQmdOVkhSRUVEVEFMZ2dsc2IyTmhiR2h2YzNRd0hRWURWUjBsQkJZdwpGQVlJS3dZQkJRVUhBd0VHQ0NzR0FRVUZCd01DTUF3R0ExVWRFd0VCL3dRQ01BQXdIUVlEVlIwT0JCWUVGUHNkCms1THViQ2VHQTVGaEFBYmJNNmRZSGkvbE1COEdBMVVkSXdRWU1CYUFGRkNwZFVUK0k4T044NStqWnpHamxZSFEKSjc3cU1BMEdDU3FHU0liM0RRRUJDd1VBQTRJQkFRQUI2clNaVUx4UjQ3VVhzMkFtVzhkWU1zN3hlQ1RENHdrSQpBYld1Mk9WY3RDUGVmeTNsZThsTjgwSlFxWkxISnRiWUU2Z0xVMEYrNnc4eGRHZmVibmJHeTNDaXdHMU1QSVgxCndLWmd0cm94UFQxK1UzM1NmVytrcHB5Mm02Y0NFMDBMa3A5M0ZSOENFVzdBR3F0eFlScVlTM2F0U1RrbGtHeWQKMjhXL3ladXV6eGo3aElVRllqUnVuWmVudE5MZGxEMk5OSjh2WktGeE50MFBLalh2ZTFpSzB1bFN5MSsvTGh5cQpPeWgyVXJaeEJHZ1NrVnIyc0ZWaGgzSUVwVCt3alRNMldkUmV2K1JueWFIN2NlTk90WGsvSmlHaXBoNS9VMUMvCjIwN2tHZHpzTk1QTThxU2RQeWJyeXVaTDdkVTZJOFBCVldFeENtTXpKWUE0QzA1emJKcHEKLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=",
"encoding": "base64",
"item": [
"ca",
{
"ca": "self-sign",
"dns": [
"localhost"
],
"name": "quadlet_demo"
}
],
"source": "/etc/pki/tls/certs/quadlet_demo.crt"
}
TASK [fedora.linux_system_roles.certificate : Reset certificate_test_certs] ****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:151
Monday 07 July 2025 20:15:52 -0400 (0:00:01.191) 0:00:12.448 ***********
ok: [managed-node1] => {
"ansible_facts": {
"certificate_test_certs": {}
},
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Create return data] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:155
Monday 07 July 2025 20:15:52 -0400 (0:00:00.036) 0:00:12.485 ***********
ok: [managed-node1] => (item=quadlet_demo) => {
"ansible_facts": {
"certificate_test_certs": {
"quadlet_demo": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAJC5nMcNvUaRhlZ6SJLKEm8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzkw\nYjk5Y2M3LTBkYmQ0NjkxLTg2NTY3YTQ4LTkyY2ExMjZkMB4XDTI1MDcwODAwMTU1\nMFoXDTI2MDcwODAwMTA1N1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnri2xiG/tdKeLKNCKh4RxYHRa430w+5Q\nfqR7okxOL21v8WPxp+yGdd+sE2UJlwNbci+9gj0c9A3lR6nmOlaXPa5z3e1Aax9+\nvn/b5uMp0lJAXsyNTexBsR4lPWy70pHgXJKF2hI1t8xYxEw7lZgj0Ab8nFl8ZfMj\nuC4Bq+xIaH11A816wBUgE2uMou2umbyY9pYbkF3f6OeV7FyqU5Pv1VDlqjQo7CdS\nUKD26ymDVM3UP0V9P8/bdR8daJ/LNQR6T5QZBNuID8DrXiahD07M0EzmiVVRtpfX\ngVVOgc80JVw2aGo10FOvPVRhfE9Wp89yKdLb7n3xpfylpQNXJnMxEwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPsd\nk5LubCeGA5FhAAbbM6dYHi/lMB8GA1UdIwQYMBaAFFCpdUT+I8ON85+jZzGjlYHQ\nJ77qMA0GCSqGSIb3DQEBCwUAA4IBAQAB6rSZULxR47UXs2AmW8dYMs7xeCTD4wkI\nAbWu2OVctCPefy3le8lN80JQqZLHJtbYE6gLU0F+6w8xdGfebnbGy3CiwG1MPIX1\nwKZgtroxPT1+U33SfW+kppy2m6cCE00Lkp93FR8CEW7AGqtxYRqYS3atSTklkGyd\n28W/yZuuzxj7hIUFYjRunZentNLdlD2NNJ8vZKFxNt0PKjXve1iK0ulSy1+/Lhyq\nOyh2UrZxBGgSkVr2sFVhh3IEpT+wjTM2WdRev+RnyaH7ceNOtXk/JiGiph5/U1C/\n207kGdzsNMPM8qSdPybryuZL7dU6I8PBVWExCmMzJYA4C05zbJpq\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAJC5nMcNvUaRhlZ6SJLKEm8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzkw\nYjk5Y2M3LTBkYmQ0NjkxLTg2NTY3YTQ4LTkyY2ExMjZkMB4XDTI1MDcwODAwMTU1\nMFoXDTI2MDcwODAwMTA1N1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnri2xiG/tdKeLKNCKh4RxYHRa430w+5Q\nfqR7okxOL21v8WPxp+yGdd+sE2UJlwNbci+9gj0c9A3lR6nmOlaXPa5z3e1Aax9+\nvn/b5uMp0lJAXsyNTexBsR4lPWy70pHgXJKF2hI1t8xYxEw7lZgj0Ab8nFl8ZfMj\nuC4Bq+xIaH11A816wBUgE2uMou2umbyY9pYbkF3f6OeV7FyqU5Pv1VDlqjQo7CdS\nUKD26ymDVM3UP0V9P8/bdR8daJ/LNQR6T5QZBNuID8DrXiahD07M0EzmiVVRtpfX\ngVVOgc80JVw2aGo10FOvPVRhfE9Wp89yKdLb7n3xpfylpQNXJnMxEwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPsd\nk5LubCeGA5FhAAbbM6dYHi/lMB8GA1UdIwQYMBaAFFCpdUT+I8ON85+jZzGjlYHQ\nJ77qMA0GCSqGSIb3DQEBCwUAA4IBAQAB6rSZULxR47UXs2AmW8dYMs7xeCTD4wkI\nAbWu2OVctCPefy3le8lN80JQqZLHJtbYE6gLU0F+6w8xdGfebnbGy3CiwG1MPIX1\nwKZgtroxPT1+U33SfW+kppy2m6cCE00Lkp93FR8CEW7AGqtxYRqYS3atSTklkGyd\n28W/yZuuzxj7hIUFYjRunZentNLdlD2NNJ8vZKFxNt0PKjXve1iK0ulSy1+/Lhyq\nOyh2UrZxBGgSkVr2sFVhh3IEpT+wjTM2WdRev+RnyaH7ceNOtXk/JiGiph5/U1C/\n207kGdzsNMPM8qSdPybryuZL7dU6I8PBVWExCmMzJYA4C05zbJpq\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCeuLbGIb+10p4s\no0IqHhHFgdFrjfTD7lB+pHuiTE4vbW/xY/Gn7IZ136wTZQmXA1tyL72CPRz0DeVH\nqeY6Vpc9rnPd7UBrH36+f9vm4ynSUkBezI1N7EGxHiU9bLvSkeBckoXaEjW3zFjE\nTDuVmCPQBvycWXxl8yO4LgGr7EhofXUDzXrAFSATa4yi7a6ZvJj2lhuQXd/o55Xs\nXKpTk+/VUOWqNCjsJ1JQoPbrKYNUzdQ/RX0/z9t1Hx1on8s1BHpPlBkE24gPwOte\nJqEPTszQTOaJVVG2l9eBVU6BzzQlXDZoajXQU689VGF8T1anz3Ip0tvuffGl/KWl\nA1cmczETAgMBAAECggEAY/oT3u+pM11sls9rQN5Z8V/ZSvdLAxOdDniLC+smoIc9\nqP58XHsHIz51xG2zQbUyp9VceKoID86k6qQhSgP7FjZJ87ObDR70lggxutjCIiH/\nO/d61meH5YoIiQ1xrayHd6gvufIYdiK4k0U1jwAbJtEP02gmjI0/pacNFH8sCOxn\nhYDGIhrxiWqO9/p3HaxodTt/TFDXeqF1DB6j898C9D0j7d9Wscz8G4uxPJ9dGob6\n1ZIgYsB0bKSlgXh6lT/fW7jmbQHzkJDQs+2dley5ssJEHDcc7HA/sxS9DKyViU8W\n1lWp6pyKo3O+Kmm/ZVRUvXpoBFpUhNxVDNcHFXq+yQKBgQDN4Kp+8rDNgGnUKuOg\nwX10qLf+YSvMXdIc346ZShlXGKPV+z41IoCdMqo7uJXZOhXUHuIBgYk7w7cBIVPe\nB8g2UA6pE30+3KgJtp3lubLElse+0AEg3Wba3Q4oEXW2/s2thGeeu9KYiawiaoXG\ndGYrZFCAKU0AnXe43UQ6s31C/QKBgQDFXQsioUl8MIpRAfMTyn6xZPsD87eU08CV\neUHIw0K6s1YMYlwBVj7BBTvdpPbQbGpc6JBYNcJdjfDATggKFt04HNCnND+BPQq4\n1QtRUwcdMd/AkrSFwsDtPdjAhCvoQwkykIYVRUtFeQhzeGieM/7wy+bmEbbcj6+V\nvIC+fqUpTwKBgHKJ1LEt+jlKgRY+CkgMfeyiVUMb8o8Pf2763U3hELoJBlVtIw3S\ntTU/07/IEO46x5ws1lY6KAxs9WqWjR9Quw2kepOMOTj48C//PldpedCvhk9dT9Nl\n+ew34cFe/Vui5PDtYezI2ABvT6oSE3z2k0T+2V6EG8gE02+xAl2aME1NAoGBAKTA\niu8ZnHJjQtNkwTmgLCqBlVkzNeiy62zbkCK7fRTFZ+tmoE8tY61VZSgc8TwDhLaS\nHQzFs77NIlAl02g4teqFg1hQsUJ9Zlns3knA+YtReusSjEesz8g7fJkYvF5it2bS\nZluTQMZ5xqg9t3d5CDSSwekEmJV+DKD+fqqGpfDZAoGAB/OO7b3F5k+rvXIoymmc\nYWnKdZzO1+gdrDFoniMJUP95X5p9WwHR5QgEzMO59Td059gNlQfyv/e6nIedClLE\np8yylt3ZHK44cMjfr+xOSlWwKF1NiMkcxdL/0C/T+a5lvf9Rw51tSSx/KVDG1yYc\npY0miCkhrBDzislmhk1fCos=\n-----END PRIVATE KEY-----\n"
}
}
},
"ansible_loop_var": "cert_name",
"cert_name": "quadlet_demo",
"changed": false
}
TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:169
Monday 07 July 2025 20:15:52 -0400 (0:00:00.076) 0:00:12.561 ***********
ok: [managed-node1] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAJC5nMcNvUaRhlZ6SJLKEm8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzkw\nYjk5Y2M3LTBkYmQ0NjkxLTg2NTY3YTQ4LTkyY2ExMjZkMB4XDTI1MDcwODAwMTU1\nMFoXDTI2MDcwODAwMTA1N1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnri2xiG/tdKeLKNCKh4RxYHRa430w+5Q\nfqR7okxOL21v8WPxp+yGdd+sE2UJlwNbci+9gj0c9A3lR6nmOlaXPa5z3e1Aax9+\nvn/b5uMp0lJAXsyNTexBsR4lPWy70pHgXJKF2hI1t8xYxEw7lZgj0Ab8nFl8ZfMj\nuC4Bq+xIaH11A816wBUgE2uMou2umbyY9pYbkF3f6OeV7FyqU5Pv1VDlqjQo7CdS\nUKD26ymDVM3UP0V9P8/bdR8daJ/LNQR6T5QZBNuID8DrXiahD07M0EzmiVVRtpfX\ngVVOgc80JVw2aGo10FOvPVRhfE9Wp89yKdLb7n3xpfylpQNXJnMxEwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPsd\nk5LubCeGA5FhAAbbM6dYHi/lMB8GA1UdIwQYMBaAFFCpdUT+I8ON85+jZzGjlYHQ\nJ77qMA0GCSqGSIb3DQEBCwUAA4IBAQAB6rSZULxR47UXs2AmW8dYMs7xeCTD4wkI\nAbWu2OVctCPefy3le8lN80JQqZLHJtbYE6gLU0F+6w8xdGfebnbGy3CiwG1MPIX1\nwKZgtroxPT1+U33SfW+kppy2m6cCE00Lkp93FR8CEW7AGqtxYRqYS3atSTklkGyd\n28W/yZuuzxj7hIUFYjRunZentNLdlD2NNJ8vZKFxNt0PKjXve1iK0ulSy1+/Lhyq\nOyh2UrZxBGgSkVr2sFVhh3IEpT+wjTM2WdRev+RnyaH7ceNOtXk/JiGiph5/U1C/\n207kGdzsNMPM8qSdPybryuZL7dU6I8PBVWExCmMzJYA4C05zbJpq\n-----END CERTIFICATE-----\n', 'key_content': '-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCeuLbGIb+10p4s\no0IqHhHFgdFrjfTD7lB+pHuiTE4vbW/xY/Gn7IZ136wTZQmXA1tyL72CPRz0DeVH\nqeY6Vpc9rnPd7UBrH36+f9vm4ynSUkBezI1N7EGxHiU9bLvSkeBckoXaEjW3zFjE\nTDuVmCPQBvycWXxl8yO4LgGr7EhofXUDzXrAFSATa4yi7a6ZvJj2lhuQXd/o55Xs\nXKpTk+/VUOWqNCjsJ1JQoPbrKYNUzdQ/RX0/z9t1Hx1on8s1BHpPlBkE24gPwOte\nJqEPTszQTOaJVVG2l9eBVU6BzzQlXDZoajXQU689VGF8T1anz3Ip0tvuffGl/KWl\nA1cmczETAgMBAAECggEAY/oT3u+pM11sls9rQN5Z8V/ZSvdLAxOdDniLC+smoIc9\nqP58XHsHIz51xG2zQbUyp9VceKoID86k6qQhSgP7FjZJ87ObDR70lggxutjCIiH/\nO/d61meH5YoIiQ1xrayHd6gvufIYdiK4k0U1jwAbJtEP02gmjI0/pacNFH8sCOxn\nhYDGIhrxiWqO9/p3HaxodTt/TFDXeqF1DB6j898C9D0j7d9Wscz8G4uxPJ9dGob6\n1ZIgYsB0bKSlgXh6lT/fW7jmbQHzkJDQs+2dley5ssJEHDcc7HA/sxS9DKyViU8W\n1lWp6pyKo3O+Kmm/ZVRUvXpoBFpUhNxVDNcHFXq+yQKBgQDN4Kp+8rDNgGnUKuOg\nwX10qLf+YSvMXdIc346ZShlXGKPV+z41IoCdMqo7uJXZOhXUHuIBgYk7w7cBIVPe\nB8g2UA6pE30+3KgJtp3lubLElse+0AEg3Wba3Q4oEXW2/s2thGeeu9KYiawiaoXG\ndGYrZFCAKU0AnXe43UQ6s31C/QKBgQDFXQsioUl8MIpRAfMTyn6xZPsD87eU08CV\neUHIw0K6s1YMYlwBVj7BBTvdpPbQbGpc6JBYNcJdjfDATggKFt04HNCnND+BPQq4\n1QtRUwcdMd/AkrSFwsDtPdjAhCvoQwkykIYVRUtFeQhzeGieM/7wy+bmEbbcj6+V\nvIC+fqUpTwKBgHKJ1LEt+jlKgRY+CkgMfeyiVUMb8o8Pf2763U3hELoJBlVtIw3S\ntTU/07/IEO46x5ws1lY6KAxs9WqWjR9Quw2kepOMOTj48C//PldpedCvhk9dT9Nl\n+ew34cFe/Vui5PDtYezI2ABvT6oSE3z2k0T+2V6EG8gE02+xAl2aME1NAoGBAKTA\niu8ZnHJjQtNkwTmgLCqBlVkzNeiy62zbkCK7fRTFZ+tmoE8tY61VZSgc8TwDhLaS\nHQzFs77NIlAl02g4teqFg1hQsUJ9Zlns3knA+YtReusSjEesz8g7fJkYvF5it2bS\nZluTQMZ5xqg9t3d5CDSSwekEmJV+DKD+fqqGpfDZAoGAB/OO7b3F5k+rvXIoymmc\nYWnKdZzO1+gdrDFoniMJUP95X5p9WwHR5QgEzMO59Td059gNlQfyv/e6nIedClLE\np8yylt3ZHK44cMjfr+xOSlWwKF1NiMkcxdL/0C/T+a5lvf9Rw51tSSx/KVDG1yYc\npY0miCkhrBDzislmhk1fCos=\n-----END PRIVATE KEY-----\n', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAJC5nMcNvUaRhlZ6SJLKEm8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzkw\nYjk5Y2M3LTBkYmQ0NjkxLTg2NTY3YTQ4LTkyY2ExMjZkMB4XDTI1MDcwODAwMTU1\nMFoXDTI2MDcwODAwMTA1N1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnri2xiG/tdKeLKNCKh4RxYHRa430w+5Q\nfqR7okxOL21v8WPxp+yGdd+sE2UJlwNbci+9gj0c9A3lR6nmOlaXPa5z3e1Aax9+\nvn/b5uMp0lJAXsyNTexBsR4lPWy70pHgXJKF2hI1t8xYxEw7lZgj0Ab8nFl8ZfMj\nuC4Bq+xIaH11A816wBUgE2uMou2umbyY9pYbkF3f6OeV7FyqU5Pv1VDlqjQo7CdS\nUKD26ymDVM3UP0V9P8/bdR8daJ/LNQR6T5QZBNuID8DrXiahD07M0EzmiVVRtpfX\ngVVOgc80JVw2aGo10FOvPVRhfE9Wp89yKdLb7n3xpfylpQNXJnMxEwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPsd\nk5LubCeGA5FhAAbbM6dYHi/lMB8GA1UdIwQYMBaAFFCpdUT+I8ON85+jZzGjlYHQ\nJ77qMA0GCSqGSIb3DQEBCwUAA4IBAQAB6rSZULxR47UXs2AmW8dYMs7xeCTD4wkI\nAbWu2OVctCPefy3le8lN80JQqZLHJtbYE6gLU0F+6w8xdGfebnbGy3CiwG1MPIX1\nwKZgtroxPT1+U33SfW+kppy2m6cCE00Lkp93FR8CEW7AGqtxYRqYS3atSTklkGyd\n28W/yZuuzxj7hIUFYjRunZentNLdlD2NNJ8vZKFxNt0PKjXve1iK0ulSy1+/Lhyq\nOyh2UrZxBGgSkVr2sFVhh3IEpT+wjTM2WdRev+RnyaH7ceNOtXk/JiGiph5/U1C/\n207kGdzsNMPM8qSdPybryuZL7dU6I8PBVWExCmMzJYA4C05zbJpq\n-----END CERTIFICATE-----\n'}) => {
"ansible_loop_var": "item",
"changed": false,
"cmd": [
"getcert",
"stop-tracking",
"-f",
"/etc/pki/tls/certs/quadlet_demo.crt"
],
"delta": "0:00:00.032238",
"end": "2025-07-07 20:15:52.766573",
"item": {
"ca": "/etc/pki/tls/certs/quadlet_demo.crt",
"ca_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAJC5nMcNvUaRhlZ6SJLKEm8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzkw\nYjk5Y2M3LTBkYmQ0NjkxLTg2NTY3YTQ4LTkyY2ExMjZkMB4XDTI1MDcwODAwMTU1\nMFoXDTI2MDcwODAwMTA1N1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnri2xiG/tdKeLKNCKh4RxYHRa430w+5Q\nfqR7okxOL21v8WPxp+yGdd+sE2UJlwNbci+9gj0c9A3lR6nmOlaXPa5z3e1Aax9+\nvn/b5uMp0lJAXsyNTexBsR4lPWy70pHgXJKF2hI1t8xYxEw7lZgj0Ab8nFl8ZfMj\nuC4Bq+xIaH11A816wBUgE2uMou2umbyY9pYbkF3f6OeV7FyqU5Pv1VDlqjQo7CdS\nUKD26ymDVM3UP0V9P8/bdR8daJ/LNQR6T5QZBNuID8DrXiahD07M0EzmiVVRtpfX\ngVVOgc80JVw2aGo10FOvPVRhfE9Wp89yKdLb7n3xpfylpQNXJnMxEwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPsd\nk5LubCeGA5FhAAbbM6dYHi/lMB8GA1UdIwQYMBaAFFCpdUT+I8ON85+jZzGjlYHQ\nJ77qMA0GCSqGSIb3DQEBCwUAA4IBAQAB6rSZULxR47UXs2AmW8dYMs7xeCTD4wkI\nAbWu2OVctCPefy3le8lN80JQqZLHJtbYE6gLU0F+6w8xdGfebnbGy3CiwG1MPIX1\nwKZgtroxPT1+U33SfW+kppy2m6cCE00Lkp93FR8CEW7AGqtxYRqYS3atSTklkGyd\n28W/yZuuzxj7hIUFYjRunZentNLdlD2NNJ8vZKFxNt0PKjXve1iK0ulSy1+/Lhyq\nOyh2UrZxBGgSkVr2sFVhh3IEpT+wjTM2WdRev+RnyaH7ceNOtXk/JiGiph5/U1C/\n207kGdzsNMPM8qSdPybryuZL7dU6I8PBVWExCmMzJYA4C05zbJpq\n-----END CERTIFICATE-----\n",
"cert": "/etc/pki/tls/certs/quadlet_demo.crt",
"cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgzCCAmugAwIBAgIRAJC5nMcNvUaRhlZ6SJLKEm8wDQYJKoZIhvcNAQELBQAw\nUDEgMB4GA1UEAwwXTG9jYWwgU2lnbmluZyBBdXRob3JpdHkxLDAqBgNVBAMMIzkw\nYjk5Y2M3LTBkYmQ0NjkxLTg2NTY3YTQ4LTkyY2ExMjZkMB4XDTI1MDcwODAwMTU1\nMFoXDTI2MDcwODAwMTA1N1owFDESMBAGA1UEAxMJbG9jYWxob3N0MIIBIjANBgkq\nhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnri2xiG/tdKeLKNCKh4RxYHRa430w+5Q\nfqR7okxOL21v8WPxp+yGdd+sE2UJlwNbci+9gj0c9A3lR6nmOlaXPa5z3e1Aax9+\nvn/b5uMp0lJAXsyNTexBsR4lPWy70pHgXJKF2hI1t8xYxEw7lZgj0Ab8nFl8ZfMj\nuC4Bq+xIaH11A816wBUgE2uMou2umbyY9pYbkF3f6OeV7FyqU5Pv1VDlqjQo7CdS\nUKD26ymDVM3UP0V9P8/bdR8daJ/LNQR6T5QZBNuID8DrXiahD07M0EzmiVVRtpfX\ngVVOgc80JVw2aGo10FOvPVRhfE9Wp89yKdLb7n3xpfylpQNXJnMxEwIDAQABo4GT\nMIGQMAsGA1UdDwQEAwIFoDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0lBBYw\nFAYIKwYBBQUHAwEGCCsGAQUFBwMCMAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFPsd\nk5LubCeGA5FhAAbbM6dYHi/lMB8GA1UdIwQYMBaAFFCpdUT+I8ON85+jZzGjlYHQ\nJ77qMA0GCSqGSIb3DQEBCwUAA4IBAQAB6rSZULxR47UXs2AmW8dYMs7xeCTD4wkI\nAbWu2OVctCPefy3le8lN80JQqZLHJtbYE6gLU0F+6w8xdGfebnbGy3CiwG1MPIX1\nwKZgtroxPT1+U33SfW+kppy2m6cCE00Lkp93FR8CEW7AGqtxYRqYS3atSTklkGyd\n28W/yZuuzxj7hIUFYjRunZentNLdlD2NNJ8vZKFxNt0PKjXve1iK0ulSy1+/Lhyq\nOyh2UrZxBGgSkVr2sFVhh3IEpT+wjTM2WdRev+RnyaH7ceNOtXk/JiGiph5/U1C/\n207kGdzsNMPM8qSdPybryuZL7dU6I8PBVWExCmMzJYA4C05zbJpq\n-----END CERTIFICATE-----\n",
"key": "/etc/pki/tls/private/quadlet_demo.key",
"key_content": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCeuLbGIb+10p4s\no0IqHhHFgdFrjfTD7lB+pHuiTE4vbW/xY/Gn7IZ136wTZQmXA1tyL72CPRz0DeVH\nqeY6Vpc9rnPd7UBrH36+f9vm4ynSUkBezI1N7EGxHiU9bLvSkeBckoXaEjW3zFjE\nTDuVmCPQBvycWXxl8yO4LgGr7EhofXUDzXrAFSATa4yi7a6ZvJj2lhuQXd/o55Xs\nXKpTk+/VUOWqNCjsJ1JQoPbrKYNUzdQ/RX0/z9t1Hx1on8s1BHpPlBkE24gPwOte\nJqEPTszQTOaJVVG2l9eBVU6BzzQlXDZoajXQU689VGF8T1anz3Ip0tvuffGl/KWl\nA1cmczETAgMBAAECggEAY/oT3u+pM11sls9rQN5Z8V/ZSvdLAxOdDniLC+smoIc9\nqP58XHsHIz51xG2zQbUyp9VceKoID86k6qQhSgP7FjZJ87ObDR70lggxutjCIiH/\nO/d61meH5YoIiQ1xrayHd6gvufIYdiK4k0U1jwAbJtEP02gmjI0/pacNFH8sCOxn\nhYDGIhrxiWqO9/p3HaxodTt/TFDXeqF1DB6j898C9D0j7d9Wscz8G4uxPJ9dGob6\n1ZIgYsB0bKSlgXh6lT/fW7jmbQHzkJDQs+2dley5ssJEHDcc7HA/sxS9DKyViU8W\n1lWp6pyKo3O+Kmm/ZVRUvXpoBFpUhNxVDNcHFXq+yQKBgQDN4Kp+8rDNgGnUKuOg\nwX10qLf+YSvMXdIc346ZShlXGKPV+z41IoCdMqo7uJXZOhXUHuIBgYk7w7cBIVPe\nB8g2UA6pE30+3KgJtp3lubLElse+0AEg3Wba3Q4oEXW2/s2thGeeu9KYiawiaoXG\ndGYrZFCAKU0AnXe43UQ6s31C/QKBgQDFXQsioUl8MIpRAfMTyn6xZPsD87eU08CV\neUHIw0K6s1YMYlwBVj7BBTvdpPbQbGpc6JBYNcJdjfDATggKFt04HNCnND+BPQq4\n1QtRUwcdMd/AkrSFwsDtPdjAhCvoQwkykIYVRUtFeQhzeGieM/7wy+bmEbbcj6+V\nvIC+fqUpTwKBgHKJ1LEt+jlKgRY+CkgMfeyiVUMb8o8Pf2763U3hELoJBlVtIw3S\ntTU/07/IEO46x5ws1lY6KAxs9WqWjR9Quw2kepOMOTj48C//PldpedCvhk9dT9Nl\n+ew34cFe/Vui5PDtYezI2ABvT6oSE3z2k0T+2V6EG8gE02+xAl2aME1NAoGBAKTA\niu8ZnHJjQtNkwTmgLCqBlVkzNeiy62zbkCK7fRTFZ+tmoE8tY61VZSgc8TwDhLaS\nHQzFs77NIlAl02g4teqFg1hQsUJ9Zlns3knA+YtReusSjEesz8g7fJkYvF5it2bS\nZluTQMZ5xqg9t3d5CDSSwekEmJV+DKD+fqqGpfDZAoGAB/OO7b3F5k+rvXIoymmc\nYWnKdZzO1+gdrDFoniMJUP95X5p9WwHR5QgEzMO59Td059gNlQfyv/e6nIedClLE\np8yylt3ZHK44cMjfr+xOSlWwKF1NiMkcxdL/0C/T+a5lvf9Rw51tSSx/KVDG1yYc\npY0miCkhrBDzislmhk1fCos=\n-----END PRIVATE KEY-----\n"
},
"rc": 0,
"start": "2025-07-07 20:15:52.734335"
}
STDOUT:
Request "20250708001550" removed.
TASK [fedora.linux_system_roles.certificate : Remove files] ********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174
Monday 07 July 2025 20:15:52 -0400 (0:00:00.419) 0:00:12.980 ***********
changed: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
changed: [managed-node1] => (item=/etc/pki/tls/private/quadlet_demo.key) => {
"ansible_loop_var": "item",
"changed": true,
"item": "/etc/pki/tls/private/quadlet_demo.key",
"path": "/etc/pki/tls/private/quadlet_demo.key",
"state": "absent"
}
ok: [managed-node1] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => {
"ansible_loop_var": "item",
"changed": false,
"item": "/etc/pki/tls/certs/quadlet_demo.crt",
"path": "/etc/pki/tls/certs/quadlet_demo.crt",
"state": "absent"
}
TASK [Run the role] ************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62
Monday 07 July 2025 20:15:53 -0400 (0:00:01.014) 0:00:13.995 ***********
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 07 July 2025 20:15:53 -0400 (0:00:00.065) 0:00:14.060 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 07 July 2025 20:15:53 -0400 (0:00:00.024) 0:00:14.085 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 07 July 2025 20:15:53 -0400 (0:00:00.040) 0:00:14.125 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 07 July 2025 20:15:54 -0400 (0:00:00.366) 0:00:14.491 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 07 July 2025 20:15:54 -0400 (0:00:00.034) 0:00:14.525 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 07 July 2025 20:15:54 -0400 (0:00:00.372) 0:00:14.898 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 07 July 2025 20:15:54 -0400 (0:00:00.025) 0:00:14.923 ***********
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
ok: [managed-node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Monday 07 July 2025 20:15:54 -0400 (0:00:00.044) 0:00:14.968 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Monday 07 July 2025 20:15:56 -0400 (0:00:01.727) 0:00:16.695 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Monday 07 July 2025 20:15:56 -0400 (0:00:00.065) 0:00:16.761 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Monday 07 July 2025 20:15:56 -0400 (0:00:00.054) 0:00:16.816 ***********
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Monday 07 July 2025 20:15:56 -0400 (0:00:00.047) 0:00:16.863 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Monday 07 July 2025 20:15:56 -0400 (0:00:00.051) 0:00:16.915 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Monday 07 July 2025 20:15:56 -0400 (0:00:00.063) 0:00:16.978 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.028969",
"end": "2025-07-07 20:15:57.149309",
"rc": 0,
"start": "2025-07-07 20:15:57.120340"
}
STDOUT:
podman version 4.9.4-dev
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Monday 07 July 2025 20:15:57 -0400 (0:00:00.416) 0:00:17.395 ***********
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "4.9.4-dev"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Monday 07 July 2025 20:15:57 -0400 (0:00:00.052) 0:00:17.448 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Monday 07 July 2025 20:15:57 -0400 (0:00:00.049) 0:00:17.497 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Monday 07 July 2025 20:15:57 -0400 (0:00:00.145) 0:00:17.642 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Monday 07 July 2025 20:15:57 -0400 (0:00:00.089) 0:00:17.732 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Monday 07 July 2025 20:15:57 -0400 (0:00:00.062) 0:00:17.794 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Monday 07 July 2025 20:15:57 -0400 (0:00:00.080) 0:00:17.875 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:15:57 -0400 (0:00:00.098) 0:00:17.973 ***********
ok: [managed-node1] => {
"ansible_facts": {
"getent_passwd": {
"root": [
"x",
"0",
"0",
"root",
"/root",
"/bin/bash"
]
}
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:15:58 -0400 (0:00:00.546) 0:00:18.519 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:15:58 -0400 (0:00:00.057) 0:00:18.577 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:15:58 -0400 (0:00:00.071) 0:00:18.649 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:15:58 -0400 (0:00:00.376) 0:00:19.025 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:15:58 -0400 (0:00:00.031) 0:00:19.057 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:15:58 -0400 (0:00:00.032) 0:00:19.089 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:15:58 -0400 (0:00:00.032) 0:00:19.122 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:15:58 -0400 (0:00:00.032) 0:00:19.155 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:15:59 -0400 (0:00:00.032) 0:00:19.187 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:15:59 -0400 (0:00:00.031) 0:00:19.219 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:15:59 -0400 (0:00:00.070) 0:00:19.289 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
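All of the subuid/subgid tasks above skip: the getsubids calls because __podman_user is root, and the /etc/subuid and /etc/subgid fallbacks because getsubids exists. For a rootless user the check would look roughly like this (the register name is a hypothetical placeholder):

    - name: Check with getsubids for user subuids
      command: getsubids {{ __podman_user }}
      register: __podman_register_subuids
      changed_when: false
      when: __podman_user not in ["root", "0"]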
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Monday 07 July 2025 20:15:59 -0400 (0:00:00.032) 0:00:19.321 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_parent_mode": "0755",
"__podman_parent_path": "/etc/containers",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
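These facts pin down where the role would write its drop-in configuration. To actually populate the 50-systemroles.conf drop-in named above, a caller would set podman_containers_conf in the play; the keys below are illustrative only, not taken from this run:

    podman_containers_conf:
      containers:
        log_driver: journald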
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126
Monday 07 July 2025 20:15:59 -0400 (0:00:00.065) 0:00:19.387 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Monday 07 July 2025 20:15:59 -0400 (0:00:00.058) 0:00:19.445 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Monday 07 July 2025 20:15:59 -0400 (0:00:00.033) 0:00:19.478 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129
Monday 07 July 2025 20:15:59 -0400 (0:00:00.031) 0:00:19.510 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Monday 07 July 2025 20:15:59 -0400 (0:00:00.065) 0:00:19.575 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Monday 07 July 2025 20:15:59 -0400 (0:00:00.047) 0:00:19.623 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132
Monday 07 July 2025 20:15:59 -0400 (0:00:00.053) 0:00:19.677 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7
Monday 07 July 2025 20:15:59 -0400 (0:00:00.104) 0:00:19.781 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15
Monday 07 July 2025 20:15:59 -0400 (0:00:00.060) 0:00:19.842 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135
Monday 07 July 2025 20:15:59 -0400 (0:00:00.034) 0:00:19.876 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8
Monday 07 July 2025 20:15:59 -0400 (0:00:00.097) 0:00:19.975 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16
Monday 07 July 2025 20:15:59 -0400 (0:00:00.052) 0:00:20.027 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21
Monday 07 July 2025 20:15:59 -0400 (0:00:00.039) 0:00:20.067 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27
Monday 07 July 2025 20:15:59 -0400 (0:00:00.077) 0:00:20.144 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
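The registries.conf.d, storage.conf, and policy.json handlers above all no-op because their driving variables are empty. A hedged sketch of what non-empty inputs could look like (values are illustrative; consult the role documentation for the exact schema):

    podman_registries_conf:
      unqualified-search-registries:
        - quay.io
    podman_storage_conf:
      storage:
        runroot: /run/containers/storage
    podman_policy_json:
      default:
        - type: insecureAcceptAnything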
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141
Monday 07 July 2025 20:16:00 -0400 (0:00:00.034) 0:00:20.179 ***********
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Monday 07 July 2025 20:16:00 -0400 (0:00:00.103) 0:00:20.282 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Monday 07 July 2025 20:16:00 -0400 (0:00:00.080) 0:00:20.362 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Monday 07 July 2025 20:16:00 -0400 (0:00:00.049) 0:00:20.412 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Monday 07 July 2025 20:16:00 -0400 (0:00:00.366) 0:00:20.778 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_ostree": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Monday 07 July 2025 20:16:00 -0400 (0:00:00.065) 0:00:20.844 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"exists": false
}
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Monday 07 July 2025 20:16:01 -0400 (0:00:00.353) 0:00:21.198 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_transactional": false
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Run systemctl] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34
Monday 07 July 2025 20:16:01 -0400 (0:00:00.037) 0:00:21.236 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"systemctl",
"is-system-running"
],
"delta": "0:00:00.008223",
"end": "2025-07-07 20:16:01.358295",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:16:01.350072"
}
STDOUT:
running
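The role probes systemd state with is-system-running and tolerates non-zero exit codes (note failed_when_result is false above) so that a degraded system does not abort the run. A sketch, using the register name the next task's condition checks:

    - name: Run systemctl
      command: systemctl is-system-running
      register: __is_system_running
      failed_when: false
      changed_when: false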
TASK [fedora.linux_system_roles.firewall : Require installed systemd] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41
Monday 07 July 2025 20:16:01 -0400 (0:00:00.350) 0:00:21.587 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46
Monday 07 July 2025 20:16:01 -0400 (0:00:00.033) 0:00:21.621 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_is_booted": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
Monday 07 July 2025 20:16:01 -0400 (0:00:00.033) 0:00:21.654 ***********
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
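"Nothing to do" means the package manager found firewalld already installed, so the task reports ok. It reduces to a standard package install, roughly:

    - name: Install firewalld
      package:
        name: firewalld
        state: present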
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63
Monday 07 July 2025 20:16:04 -0400 (0:00:02.854) 0:00:24.509 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68
Monday 07 July 2025 20:16:04 -0400 (0:00:00.031) 0:00:24.541 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73
Monday 07 July 2025 20:16:04 -0400 (0:00:00.044) 0:00:24.585 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Monday 07 July 2025 20:16:04 -0400 (0:00:00.077) 0:00:24.663 ***********
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14
Monday 07 July 2025 20:16:04 -0400 (0:00:00.036) 0:00:24.699 ***********
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
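Both conflicting-service tasks skip every item (nftables, iptables, ufw) because firewall_disable_conflicting_services defaults to false. Opting in is a one-line variable in the play vars:

    firewall_disable_conflicting_services: true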
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24
Monday 07 July 2025 20:16:04 -0400 (0:00:00.038) 0:00:24.738 ***********
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "polkit.service sysinit.target dbus.socket basic.target system.slice dbus.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "network-pre.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "ipset.service iptables.service shutdown.target nftables.service ip6tables.service ebtables.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
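Unmasking reports ok with no change since the unit was merely disabled, not masked (UnitFileState above is "disabled"). The task is essentially the systemd module with masking turned off, along the lines of:

    - name: Unmask firewalld service
      systemd:
        name: firewalld
        masked: false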
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30
Monday 07 July 2025 20:16:05 -0400 (0:00:00.502) 0:00:25.241 ***********
changed: [managed-node1] => {
"changed": true,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "system.slice sysinit.target basic.target dbus.socket dbus.service polkit.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "network-pre.target shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "iptables.service ip6tables.service ebtables.service shutdown.target ipset.service nftables.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice dbus.socket",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "disabled",
"UtmpMode": "init",
"Wants": "network-pre.target",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
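This is the first real change of the firewall role: the unit goes from inactive/disabled to enabled and started (the status dump reflects the pre-change state). A sketch of the task:

    - name: Enable and start firewalld service
      systemd:
        name: firewalld
        state: started
        enabled: true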
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36
Monday 07 July 2025 20:16:06 -0400 (0:00:00.970) 0:00:26.211 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/libexec/platform-python",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45
Monday 07 July 2025 20:16:06 -0400 (0:00:00.041) 0:00:26.253 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58
Monday 07 July 2025 20:16:06 -0400 (0:00:00.029) 0:00:26.282 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
Monday 07 July 2025 20:16:06 -0400 (0:00:00.030) 0:00:26.313 ***********
changed: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
changed: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": true,
"ansible_loop_var": "item",
"changed": true,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
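The two changed items reveal the firewall variable this test passes in: a list of port/state entries, which is also why the later "firewall | length == 1" tasks skip (the list has two elements). Reconstructed from the loop items above:

    firewall:
      - port: 8000/tcp
        state: enabled
      - port: 9000/tcp
        state: enabled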
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126
Monday 07 July 2025 20:16:07 -0400 (0:00:01.241) 0:00:27.555 ***********
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137
Monday 07 July 2025 20:16:07 -0400 (0:00:00.047) 0:00:27.602 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146
Monday 07 July 2025 20:16:07 -0400 (0:00:00.033) 0:00:27.636 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152
Monday 07 July 2025 20:16:07 -0400 (0:00:00.031) 0:00:27.667 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161
Monday 07 July 2025 20:16:07 -0400 (0:00:00.032) 0:00:27.699 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172
Monday 07 July 2025 20:16:07 -0400 (0:00:00.028) 0:00:27.728 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178
Monday 07 July 2025 20:16:07 -0400 (0:00:00.027) 0:00:27.756 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148
Monday 07 July 2025 20:16:07 -0400 (0:00:00.044) 0:00:27.800 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155
Monday 07 July 2025 20:16:07 -0400 (0:00:00.030) 0:00:27.830 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159
Monday 07 July 2025 20:16:07 -0400 (0:00:00.028) 0:00:27.859 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168
Monday 07 July 2025 20:16:07 -0400 (0:00:00.062) 0:00:27.921 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177
Monday 07 July 2025 20:16:07 -0400 (0:00:00.026) 0:00:27.948 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:16:07 -0400 (0:00:00.122) 0:00:28.071 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:16:07 -0400 (0:00:00.034) 0:00:28.106 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:07 -0400 (0:00:00.055) 0:00:28.161 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:08 -0400 (0:00:00.033) 0:00:28.194 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:08 -0400 (0:00:00.034) 0:00:28.229 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:08 -0400 (0:00:00.041) 0:00:28.270 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:08 -0400 (0:00:00.029) 0:00:28.300 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:08 -0400 (0:00:00.033) 0:00:28.333 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:08 -0400 (0:00:00.030) 0:00:28.364 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:08 -0400 (0:00:00.031) 0:00:28.396 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:08 -0400 (0:00:00.030) 0:00:28.426 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:08 -0400 (0:00:00.031) 0:00:28.457 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:08 -0400 (0:00:00.030) 0:00:28.488 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:08 -0400 (0:00:00.029) 0:00:28.517 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:16:08 -0400 (0:00:00.066) 0:00:28.584 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
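Rootless handling hinges on these two facts: root means __podman_rootless is false and the runtime dir is /run/user/0. A sketch of how they could be derived (the exact expressions are assumptions; field 1 of a getent passwd entry is the UID):

    - name: Set variables part 2
      set_fact:
        __podman_rootless: "{{ __podman_user != 'root' }}"
        __podman_xdg_runtime_dir: "/run/user/{{ ansible_facts['getent_passwd'][__podman_user][1] }}"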
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:16:08 -0400 (0:00:00.038) 0:00:28.623 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:08 -0400 (0:00:00.057) 0:00:28.680 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:08 -0400 (0:00:00.029) 0:00:28.710 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:08 -0400 (0:00:00.030) 0:00:28.741 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
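All three linger tasks skip because the user is root; lingering only matters for rootless users, whose systemd user instance must outlive their login sessions. For a rootless user the enable step would be roughly:

    - name: Enable linger if needed
      command: loginctl enable-linger {{ __podman_user }}
      args:
        creates: /var/lib/systemd/linger/{{ __podman_user }}
      when: __podman_rootless | bool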
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:16:08 -0400 (0:00:00.027) 0:00:28.768 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:16:08 -0400 (0:00:00.029) 0:00:28.798 ***********
[WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
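The secret payload is hidden by no_log, but "changed": true indicates a podman secret was created. A hedged sketch using the containers.podman.podman_secret module; the secret name and data variable are hypothetical placeholders, not the censored values from this run:

    - name: Manage each secret
      containers.podman.podman_secret:
        name: demo-secret                    # hypothetical; real names are censored above
        data: "{{ vaulted_secret_value }}"   # hypothetical variable
        state: present
      no_log: true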
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:16:09 -0400 (0:00:00.590) 0:00:29.388 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:16:09 -0400 (0:00:00.034) 0:00:29.423 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:09 -0400 (0:00:00.057) 0:00:29.480 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:09 -0400 (0:00:00.034) 0:00:29.515 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:09 -0400 (0:00:00.034) 0:00:29.549 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:09 -0400 (0:00:00.045) 0:00:29.594 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:09 -0400 (0:00:00.032) 0:00:29.626 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:09 -0400 (0:00:00.030) 0:00:29.657 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:09 -0400 (0:00:00.031) 0:00:29.689 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:09 -0400 (0:00:00.030) 0:00:29.719 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:09 -0400 (0:00:00.066) 0:00:29.786 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:09 -0400 (0:00:00.032) 0:00:29.818 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:09 -0400 (0:00:00.032) 0:00:29.851 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:09 -0400 (0:00:00.032) 0:00:29.883 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:16:09 -0400 (0:00:00.032) 0:00:29.916 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:16:09 -0400 (0:00:00.042) 0:00:29.958 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:09 -0400 (0:00:00.056) 0:00:30.014 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:09 -0400 (0:00:00.030) 0:00:30.045 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:09 -0400 (0:00:00.030) 0:00:30.076 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:16:09 -0400 (0:00:00.031) 0:00:30.107 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:16:09 -0400 (0:00:00.029) 0:00:30.137 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:16:10 -0400 (0:00:00.484) 0:00:30.621 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:16:10 -0400 (0:00:00.035) 0:00:30.657 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:10 -0400 (0:00:00.055) 0:00:30.712 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:10 -0400 (0:00:00.034) 0:00:30.747 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:10 -0400 (0:00:00.034) 0:00:30.782 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:10 -0400 (0:00:00.043) 0:00:30.826 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:10 -0400 (0:00:00.069) 0:00:30.895 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:10 -0400 (0:00:00.038) 0:00:30.933 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:10 -0400 (0:00:00.031) 0:00:30.965 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:10 -0400 (0:00:00.032) 0:00:30.997 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:10 -0400 (0:00:00.030) 0:00:31.028 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:10 -0400 (0:00:00.031) 0:00:31.060 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:10 -0400 (0:00:00.033) 0:00:31.093 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:10 -0400 (0:00:00.032) 0:00:31.126 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:16:10 -0400 (0:00:00.031) 0:00:31.157 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:16:11 -0400 (0:00:00.038) 0:00:31.195 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:11 -0400 (0:00:00.056) 0:00:31.252 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:11 -0400 (0:00:00.028) 0:00:31.281 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:11 -0400 (0:00:00.030) 0:00:31.311 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:16:11 -0400 (0:00:00.028) 0:00:31.340 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:16:11 -0400 (0:00:00.029) 0:00:31.369 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
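Note: both "Manage each secret" results are censored by no_log, but each run creates or updates a podman secret on the managed node. One of them is referenced later in the mysql container quadlet as mysql-root-password-container. A minimal manual sketch of the same operation (the environment variable stands in for the vaulted value; the role does this through its own secret handling, not this command):

    # sketch only; "-" makes podman secret create read the value from stdin
    printf '%s' "$MYSQL_ROOT_PASSWORD" | podman secret create mysql-root-password-container -
    podman secret ls   # confirms the secret exists; values are never displayed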
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184
Monday 07 July 2025 20:16:11 -0400 (0:00:00.480) 0:00:31.850 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191
Monday 07 July 2025 20:16:11 -0400 (0:00:00.027) 0:00:31.878 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:11 -0400 (0:00:00.184) 0:00:32.063 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.network",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress",
"__podman_quadlet_template_src": ""
},
"changed": false
}
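For readability, the __podman_quadlet_str value above unescapes to the following quadlet-demo.network unit, which the tasks below install as /etc/containers/systemd/quadlet-demo.network:

    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress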
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:11 -0400 (0:00:00.044) 0:00:32.107 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:11 -0400 (0:00:00.040) 0:00:32.148 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:12 -0400 (0:00:00.030) 0:00:32.178 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "network",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:12 -0400 (0:00:00.044) 0:00:32.223 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:12 -0400 (0:00:00.058) 0:00:32.281 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:12 -0400 (0:00:00.033) 0:00:32.315 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:12 -0400 (0:00:00.033) 0:00:32.349 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:12 -0400 (0:00:00.041) 0:00:32.390 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
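Note: this stat result confirms /usr/bin/getsubids exists, yet the subuid/subgid checks that follow are still skipped because __podman_user is root. For a rootless user the two "Check with getsubids" tasks would query the ranges roughly like this (user name hypothetical):

    getsubids someuser      # list /etc/subuid ranges allocated to the user
    getsubids -g someuser   # list /etc/subgid ranges allocated to the user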
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:12 -0400 (0:00:00.345) 0:00:32.736 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:12 -0400 (0:00:00.032) 0:00:32.769 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:12 -0400 (0:00:00.031) 0:00:32.800 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:12 -0400 (0:00:00.033) 0:00:32.833 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:12 -0400 (0:00:00.031) 0:00:32.865 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:12 -0400 (0:00:00.032) 0:00:32.897 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:12 -0400 (0:00:00.031) 0:00:32.928 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:12 -0400 (0:00:00.073) 0:00:33.002 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:12 -0400 (0:00:00.034) 0:00:33.037 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-network.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:12 -0400 (0:00:00.052) 0:00:33.090 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:12 -0400 (0:00:00.036) 0:00:33.126 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:12 -0400 (0:00:00.033) 0:00:33.160 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:13 -0400 (0:00:00.074) 0:00:33.235 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:13 -0400 (0:00:00.037) 0:00:33.272 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:13 -0400 (0:00:00.030) 0:00:33.302 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:13 -0400 (0:00:00.067) 0:00:33.370 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:13 -0400 (0:00:00.053) 0:00:33.423 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:13 -0400 (0:00:00.031) 0:00:33.455 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:13 -0400 (0:00:00.030) 0:00:33.485 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:13 -0400 (0:00:00.029) 0:00:33.515 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:13 -0400 (0:00:00.027) 0:00:33.543 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:13 -0400 (0:00:00.030) 0:00:33.574 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 90,
"state": "directory",
"uid": 0
}
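Note: /etc/containers/systemd is the directory the quadlet generator scans for system-scope units. The task reports "changed": false, i.e. the directory already exists with the desired ownership and mode; its effect, done by hand, is roughly:

    # sketch of the task's effect (the result shape above matches ansible.builtin.file)
    install -d -m 0755 /etc/containers/systemd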
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:13 -0400 (0:00:00.354) 0:00:33.928 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0",
"dest": "/etc/containers/systemd/quadlet-demo.network",
"gid": 0,
"group": "root",
"md5sum": "061f3cf318cbd8ab5794bb1173831fb8",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 74,
"src": "/root/.ansible/tmp/ansible-tmp-1751933773.8054082-20225-128599699437087/source",
"state": "file",
"uid": 0
}
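The "checksum" field in a copy result is the SHA-1 of the deployed file, so the install can be verified directly on the node:

    sha1sum /etc/containers/systemd/quadlet-demo.network
    # expected: e57c08d49aff4bae8daab138d913aeddaa8682a0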
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:14 -0400 (0:00:00.828) 0:00:34.757 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:14 -0400 (0:00:00.035) 0:00:34.793 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:14 -0400 (0:00:00.032) 0:00:34.825 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
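Note: "Reload systemctl" is a daemon reload; it is what makes systemd re-run its generators over /etc/containers/systemd, so the quadlet file just copied is turned into a real service unit. Equivalent by hand:

    systemctl daemon-reload
    # the generated unit now exists (see FragmentPath in the result below)
    systemctl cat quadlet-demo-network.service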
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:15 -0400 (0:00:00.612) 0:00:35.437 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-network.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "-.mount basic.target system.slice sysinit.target systemd-journald.socket",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-network.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet=192.168.30.0/24 --gateway=192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-network.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-network.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "-.mount system.slice sysinit.target",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-network",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "infinity",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
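Note: the status dump above appears to be the snapshot taken as the start job was submitted, which is why ActiveState is still "inactive" and SubState "dead"; with Type=oneshot and RemainAfterExit=yes the unit settles into "active (exited)" after its single ExecStart run. That ExecStart line is the whole service; run outside systemd it would be:

    /usr/bin/podman network create --ignore --subnet=192.168.30.0/24 \
        --gateway=192.168.30.1 --label app=wordpress systemd-quadlet-demo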
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:15 -0400 (0:00:00.568) 0:00:36.006 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:15 -0400 (0:00:00.033) 0:00:36.039 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo-mysql.volume",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Volume]",
"__podman_quadlet_template_src": ""
},
"changed": false
}
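The one-line __podman_quadlet_str above is the entire quadlet-demo-mysql.volume unit; with its trailing newline it is the 9-byte file the copy task below reports:

    [Volume]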
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:15 -0400 (0:00:00.042) 0:00:36.082 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:15 -0400 (0:00:00.038) 0:00:36.120 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:15 -0400 (0:00:00.029) 0:00:36.150 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "volume",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:16 -0400 (0:00:00.044) 0:00:36.195 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:16 -0400 (0:00:00.056) 0:00:36.251 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:16 -0400 (0:00:00.033) 0:00:36.285 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:16 -0400 (0:00:00.032) 0:00:36.318 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:16 -0400 (0:00:00.043) 0:00:36.362 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:16 -0400 (0:00:00.350) 0:00:36.712 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:16 -0400 (0:00:00.032) 0:00:36.745 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:16 -0400 (0:00:00.034) 0:00:36.779 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:16 -0400 (0:00:00.030) 0:00:36.810 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:16 -0400 (0:00:00.072) 0:00:36.882 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:16 -0400 (0:00:00.032) 0:00:36.915 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:16 -0400 (0:00:00.033) 0:00:36.948 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:16 -0400 (0:00:00.037) 0:00:36.986 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:16 -0400 (0:00:00.032) 0:00:37.019 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql-volume.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:16 -0400 (0:00:00.051) 0:00:37.071 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:16 -0400 (0:00:00.032) 0:00:37.103 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:16 -0400 (0:00:00.032) 0:00:37.135 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:17 -0400 (0:00:00.072) 0:00:37.208 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:17 -0400 (0:00:00.036) 0:00:37.244 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:17 -0400 (0:00:00.029) 0:00:37.274 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:17 -0400 (0:00:00.064) 0:00:37.338 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:17 -0400 (0:00:00.051) 0:00:37.390 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:17 -0400 (0:00:00.030) 0:00:37.421 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:17 -0400 (0:00:00.029) 0:00:37.450 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:17 -0400 (0:00:00.031) 0:00:37.482 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:17 -0400 (0:00:00.070) 0:00:37.553 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:17 -0400 (0:00:00.030) 0:00:37.583 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 118,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:17 -0400 (0:00:00.359) 0:00:37.942 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.volume",
"gid": 0,
"group": "root",
"md5sum": "5ddd03a022aeb4502d9bc8ce436b4233",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 9,
"src": "/root/.ansible/tmp/ansible-tmp-1751933777.8190536-20316-75804711088380/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:18 -0400 (0:00:00.647) 0:00:38.590 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:18 -0400 (0:00:00.036) 0:00:38.626 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:18 -0400 (0:00:00.030) 0:00:38.657 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:19 -0400 (0:00:00.605) 0:00:39.262 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql-volume.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target sysinit.target -.mount systemd-journald.socket system.slice",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo-mysql-volume.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-mysql-volume.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "control-group",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql-volume.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "yes",
"RemoveIPC": "no",
"Requires": "sysinit.target -.mount system.slice",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql-volume",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "infinity",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "oneshot",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:19 -0400 (0:00:00.565) 0:00:39.827 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:19 -0400 (0:00:00.033) 0:00:39.861 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n",
"__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2"
},
"changed": false
}
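For reference, the escaped __podman_quadlet_str in the task result above decodes to the following quadlet .container unit; this is the content the role writes to /etc/containers/systemd/quadlet-demo-mysql.container in a later task:

    [Install]
    WantedBy=default.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill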
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:19 -0400 (0:00:00.088) 0:00:39.949 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:19 -0400 (0:00:00.039) 0:00:39.989 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:19 -0400 (0:00:00.038) 0:00:40.027 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo-mysql",
"__podman_quadlet_type": "container",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:19 -0400 (0:00:00.047) 0:00:40.075 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:19 -0400 (0:00:00.057) 0:00:40.132 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:19 -0400 (0:00:00.033) 0:00:40.166 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:20 -0400 (0:00:00.035) 0:00:40.201 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:20 -0400 (0:00:00.041) 0:00:40.242 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:20 -0400 (0:00:00.347) 0:00:40.590 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:20 -0400 (0:00:00.076) 0:00:40.666 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:20 -0400 (0:00:00.032) 0:00:40.699 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:20 -0400 (0:00:00.035) 0:00:40.734 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:20 -0400 (0:00:00.032) 0:00:40.767 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:20 -0400 (0:00:00.033) 0:00:40.800 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:20 -0400 (0:00:00.034) 0:00:40.834 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:20 -0400 (0:00:00.032) 0:00:40.867 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:20 -0400 (0:00:00.033) 0:00:40.900 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "quadlet-demo-mysql.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:20 -0400 (0:00:00.052) 0:00:40.953 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:20 -0400 (0:00:00.036) 0:00:40.990 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:20 -0400 (0:00:00.040) 0:00:41.030 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/mysql:5.6"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container",
"__podman_volumes": [
"/tmp/quadlet_demo"
]
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:20 -0400 (0:00:00.074) 0:00:41.104 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:20 -0400 (0:00:00.039) 0:00:41.144 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:21 -0400 (0:00:00.028) 0:00:41.173 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:21 -0400 (0:00:00.065) 0:00:41.238 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:21 -0400 (0:00:00.051) 0:00:41.289 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:21 -0400 (0:00:00.030) 0:00:41.319 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:21 -0400 (0:00:00.067) 0:00:41.386 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:21 -0400 (0:00:00.031) 0:00:41.418 ***********
changed: [managed-node1] => (item=/tmp/quadlet_demo) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/quadlet_demo",
"mode": "0777",
"owner": "root",
"path": "/tmp/quadlet_demo",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:21 -0400 (0:00:00.365) 0:00:41.783 ***********
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:28 -0400 (0:00:07.221) 0:00:49.004 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 151,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:29 -0400 (0:00:00.356) 0:00:49.361 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:29 -0400 (0:00:00.035) 0:00:49.397 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4",
"dest": "/etc/containers/systemd/quadlet-demo-mysql.container",
"gid": 0,
"group": "root",
"md5sum": "341b473056d2a5dfa35970b0d2e23a5d",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 363,
"src": "/root/.ansible/tmp/ansible-tmp-1751933789.2744262-20446-266850841937438/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:29 -0400 (0:00:00.652) 0:00:50.049 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:29 -0400 (0:00:00.034) 0:00:50.083 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:30 -0400 (0:00:00.621) 0:00:50.705 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo-mysql.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target quadlet-demo-network.service systemd-journald.socket system.slice sysinit.target quadlet-demo-mysql-volume.service -.mount tmp.mount",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "yes",
"DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids",
"Description": "quadlet-demo-mysql.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network=systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo-mysql.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo-mysql.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "-.mount sysinit.target quadlet-demo-network.service system.slice quadlet-demo-mysql-volume.service",
"RequiresMountsFor": "/run/containers /tmp/quadlet_demo",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo-mysql",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
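The ExecStart recorded in the unit properties above, reflowed as a shell command line for readability; each flag corresponds to a key in the quadlet [Container] section rendered earlier (Volume= becomes -v, Secret= becomes --secret, HealthCmd= becomes --health-cmd, and so on):

    /usr/bin/podman run \
        --name=quadlet-demo-mysql \
        --cidfile=/run/quadlet-demo-mysql.cid \
        --replace \
        --rm \
        --cgroups=split \
        --network=systemd-quadlet-demo \
        --sdnotify=conmon \
        -d \
        -v systemd-quadlet-demo-mysql:/var/lib/mysql \
        -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z \
        --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD \
        --health-cmd /bin/true \
        --health-on-failure kill \
        quay.io/linux-system-roles/mysql:5.6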
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:31 -0400 (0:00:01.021) 0:00:51.726 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:31 -0400 (0:00:00.033) 0:00:51.760 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "envoy-proxy-configmap.yml",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80",
"__podman_quadlet_template_src": ""
},
"changed": false
}
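For reference, the escaped __podman_quadlet_str above decodes to the following ConfigMap, written to /etc/containers/systemd/envoy-proxy-configmap.yml in a later task. The YAML nesting below is reconstructed from the standard Envoy bootstrap layout, so treat the exact indentation as approximate:

    ---
    apiVersion: v1
    kind: ConfigMap
    metadata:
      name: envoy-proxy-config
    data:
      envoy.yaml: |
        admin:
          address:
            socket_address:
              address: 0.0.0.0
              port_value: 9901

        static_resources:
          listeners:
          - name: listener_0
            address:
              socket_address:
                address: 0.0.0.0
                port_value: 8080
            filter_chains:
            - filters:
              - name: envoy.filters.network.http_connection_manager
                typed_config:
                  "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
                  stat_prefix: ingress_http
                  codec_type: AUTO
                  route_config:
                    name: local_route
                    virtual_hosts:
                    - name: local_service
                      domains: ["*"]
                      routes:
                      - match:
                          prefix: "/"
                        route:
                          cluster: backend
                  http_filters:
                  - name: envoy.filters.http.router
                    typed_config:
                      "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router
              transport_socket:
                name: envoy.transport_sockets.tls
                typed_config:
                  "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext
                  common_tls_context:
                    tls_certificates:
                    - certificate_chain:
                        filename: /etc/envoy-certificates/certificate.pem
                      private_key:
                        filename: /etc/envoy-certificates/certificate.key
          clusters:
          - name: backend
            connect_timeout: 5s
            type: STATIC
            dns_refresh_rate: 1800s
            lb_policy: ROUND_ROBIN
            load_assignment:
              cluster_name: backend
              endpoints:
              - lb_endpoints:
                - endpoint:
                    address:
                      socket_address:
                        address: 127.0.0.1
                        port_value: 80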
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:31 -0400 (0:00:00.044) 0:00:51.805 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:31 -0400 (0:00:00.040) 0:00:51.845 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:31 -0400 (0:00:00.033) 0:00:51.878 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "envoy-proxy-configmap",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:31 -0400 (0:00:00.046) 0:00:51.925 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:31 -0400 (0:00:00.059) 0:00:51.985 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:31 -0400 (0:00:00.035) 0:00:52.020 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:31 -0400 (0:00:00.079) 0:00:52.099 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:31 -0400 (0:00:00.043) 0:00:52.143 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:32 -0400 (0:00:00.364) 0:00:52.507 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:32 -0400 (0:00:00.032) 0:00:52.540 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:32 -0400 (0:00:00.034) 0:00:52.575 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:32 -0400 (0:00:00.034) 0:00:52.609 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:32 -0400 (0:00:00.032) 0:00:52.642 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:32 -0400 (0:00:00.033) 0:00:52.675 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:32 -0400 (0:00:00.032) 0:00:52.708 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:32 -0400 (0:00:00.033) 0:00:52.742 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:32 -0400 (0:00:00.032) 0:00:52.775 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:32 -0400 (0:00:00.056) 0:00:52.831 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:32 -0400 (0:00:00.035) 0:00:52.867 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:32 -0400 (0:00:00.033) 0:00:52.900 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:32 -0400 (0:00:00.075) 0:00:52.976 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:32 -0400 (0:00:00.039) 0:00:53.015 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:32 -0400 (0:00:00.030) 0:00:53.046 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:32 -0400 (0:00:00.071) 0:00:53.117 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:33 -0400 (0:00:00.095) 0:00:53.213 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:33 -0400 (0:00:00.030) 0:00:53.244 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:33 -0400 (0:00:00.031) 0:00:53.276 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:33 -0400 (0:00:00.030) 0:00:53.306 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:33 -0400 (0:00:00.029) 0:00:53.336 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:33 -0400 (0:00:00.030) 0:00:53.366 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 187,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:33 -0400 (0:00:00.365) 0:00:53.731 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "d681c7d56f912150d041873e880818b22a90c188",
"dest": "/etc/containers/systemd/envoy-proxy-configmap.yml",
"gid": 0,
"group": "root",
"md5sum": "aec75d972c231aac004e1338934544cf",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 2102,
"src": "/root/.ansible/tmp/ansible-tmp-1751933793.6082838-20536-246540776008881/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:34 -0400 (0:00:00.681) 0:00:54.413 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:34 -0400 (0:00:00.034) 0:00:54.447 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:34 -0400 (0:00:00.032) 0:00:54.480 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:34 -0400 (0:00:00.669) 0:00:55.150 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:35 -0400 (0:00:00.037) 0:00:55.188 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:35 -0400 (0:00:00.037) 0:00:55.225 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n",
"__podman_quadlet_template_src": "quadlet-demo.yml.j2"
},
"changed": false
}
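For reference, the escaped __podman_quadlet_str above decodes to the following Kubernetes YAML (a PersistentVolumeClaim plus a two-container Pod), written to /etc/containers/systemd/quadlet-demo.yml in a later task; indentation is reconstructed:

    ---
    apiVersion: v1
    kind: PersistentVolumeClaim
    metadata:
      name: wp-pv-claim
      labels:
        app: wordpress
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 20Gi
    ---
    apiVersion: v1
    kind: Pod
    metadata:
      name: quadlet-demo
    spec:
      containers:
      - name: wordpress
        image: quay.io/linux-system-roles/wordpress:4.8-apache
        env:
        - name: WORDPRESS_DB_HOST
          value: quadlet-demo-mysql
        - name: WORDPRESS_DB_PASSWORD
          valueFrom:
            secretKeyRef:
              name: mysql-root-password-kube
              key: password
        volumeMounts:
        - name: wordpress-persistent-storage
          mountPath: /var/www/html
        resources:
          requests:
            memory: "64Mi"
            cpu: "250m"
          limits:
            memory: "128Mi"
            cpu: "500m"
      - name: envoy
        image: quay.io/linux-system-roles/envoyproxy:v1.25.0
        volumeMounts:
        - name: config-volume
          mountPath: /etc/envoy
        - name: certificates
          mountPath: /etc/envoy-certificates
        env:
        - name: ENVOY_UID
          value: "0"
        resources:
          requests:
            memory: "64Mi"
            cpu: "250m"
          limits:
            memory: "128Mi"
            cpu: "500m"
      volumes:
      - name: config-volume
        configMap:
          name: envoy-proxy-config
      - name: certificates
        secret:
          secretName: envoy-certificates
      - name: wordpress-persistent-storage
        persistentVolumeClaim:
          claimName: wp-pv-claim
      - name: www # not used - for testing hostpath
        hostPath:
          path: /tmp/httpd3
      - name: create # not used - for testing hostpath
        hostPath:
          path: /tmp/httpd3-create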
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:35 -0400 (0:00:00.087) 0:00:55.312 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:35 -0400 (0:00:00.039) 0:00:55.352 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_str",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:35 -0400 (0:00:00.031) 0:00:55.384 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "yml",
"__podman_rootless": false
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:35 -0400 (0:00:00.047) 0:00:55.431 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:35 -0400 (0:00:00.102) 0:00:55.534 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:35 -0400 (0:00:00.036) 0:00:55.570 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:35 -0400 (0:00:00.035) 0:00:55.606 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:35 -0400 (0:00:00.044) 0:00:55.650 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:35 -0400 (0:00:00.353) 0:00:56.004 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:35 -0400 (0:00:00.034) 0:00:56.038 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:35 -0400 (0:00:00.032) 0:00:56.071 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:35 -0400 (0:00:00.033) 0:00:56.104 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:35 -0400 (0:00:00.038) 0:00:56.143 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:36 -0400 (0:00:00.032) 0:00:56.175 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:36 -0400 (0:00:00.033) 0:00:56.208 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:36 -0400 (0:00:00.033) 0:00:56.242 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:36 -0400 (0:00:00.033) 0:00:56.275 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": "",
"__podman_service_name": "",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:36 -0400 (0:00:00.054) 0:00:56.330 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:36 -0400 (0:00:00.034) 0:00:56.364 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_kube_yamls_raw | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:36 -0400 (0:00:00.035) 0:00:56.399 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:36 -0400 (0:00:00.074) 0:00:56.474 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:36 -0400 (0:00:00.038) 0:00:56.513 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:36 -0400 (0:00:00.029) 0:00:56.543 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:36 -0400 (0:00:00.115) 0:00:56.658 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:36 -0400 (0:00:00.052) 0:00:56.710 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:36 -0400 (0:00:00.031) 0:00:56.742 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:36 -0400 (0:00:00.030) 0:00:56.772 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:36 -0400 (0:00:00.031) 0:00:56.804 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:36 -0400 (0:00:00.028) 0:00:56.832 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:36 -0400 (0:00:00.032) 0:00:56.865 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:37 -0400 (0:00:00.353) 0:00:57.219 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_quadlet_file_src | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:37 -0400 (0:00:00.035) 0:00:57.254 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "998dccde0483b1654327a46ddd89cbaa47650370",
"dest": "/etc/containers/systemd/quadlet-demo.yml",
"gid": 0,
"group": "root",
"md5sum": "fd890594adfc24339cb9cdc5e7b19a66",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 1605,
"src": "/root/.ansible/tmp/ansible-tmp-1751933797.1321883-20621-109965661389418/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:37 -0400 (0:00:00.791) 0:00:58.045 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_content is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:37 -0400 (0:00:00.030) 0:00:58.076 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:16:38 -0400 (0:00:00.844) 0:00:58.920 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:16:38 -0400 (0:00:00.036) 0:00:58.957 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_service_name | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:16:38 -0400 (0:00:00.038) 0:00:58.995 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
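For readability, the escaped __podman_quadlet_str above is the quadlet-demo.kube unit that this pass installs under /etc/containers/systemd a few tasks later; unescaped it reads:

    [Install]
    WantedBy=default.target

    [Unit]
    Requires=quadlet-demo-mysql.service
    After=quadlet-demo-mysql.service

    [Kube]
    # Point to the yaml file in the same directory
    Yaml=quadlet-demo.yml
    # Use the quadlet-demo network
    Network=quadlet-demo.network
    # Publish the envoy proxy data port
    PublishPort=8000:8080
    # Publish the envoy proxy admin port
    PublishPort=9000:9901
    # Use the envoy proxy config map in the same directory
    ConfigMap=envoy-proxy-configmap.yml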
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:16:38 -0400 (0:00:00.044) 0:00:59.040 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "created",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:16:38 -0400 (0:00:00.039) 0:00:59.079 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:16:38 -0400 (0:00:00.079) 0:00:59.159 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
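The quadlet name and type here are split out of the quadlet-demo.kube file name. A minimal sketch of that derivation (hypothetical expressions using Ansible's basename/splitext filters, not the role's exact code):

    # Hypothetical sketch: derive name and type from the quadlet file name.
    __podman_quadlet_name: "{{ __podman_quadlet_file_src | basename | splitext | first }}"         # -> quadlet-demo
    __podman_quadlet_type: "{{ __podman_quadlet_file_src | splitext | last | replace('.', '') }}"  # -> kube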
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:16:39 -0400 (0:00:00.049) 0:00:59.208 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:16:39 -0400 (0:00:00.060) 0:00:59.269 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:16:39 -0400 (0:00:00.036) 0:00:59.306 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:16:39 -0400 (0:00:00.035) 0:00:59.341 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:16:39 -0400 (0:00:00.043) 0:00:59.385 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:16:39 -0400 (0:00:00.351) 0:00:59.736 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:16:39 -0400 (0:00:00.032) 0:00:59.769 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:16:39 -0400 (0:00:00.033) 0:00:59.802 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:16:39 -0400 (0:00:00.033) 0:00:59.835 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:16:39 -0400 (0:00:00.032) 0:00:59.868 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:16:39 -0400 (0:00:00.033) 0:00:59.901 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:16:39 -0400 (0:00:00.032) 0:00:59.934 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:16:39 -0400 (0:00:00.034) 0:00:59.968 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:16:39 -0400 (0:00:00.032) 0:01:00.001 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:16:39 -0400 (0:00:00.055) 0:01:00.057 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:16:39 -0400 (0:00:00.035) 0:01:00.092 ***********
ok: [managed-node1] => {
"changed": false,
"content": "LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK",
"encoding": "base64",
"source": "/etc/containers/systemd/quadlet-demo.yml"
}
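The slurp module always returns file contents base64-encoded, so the payload above is just the quadlet-demo.yml written at 20:16:37 (pipe it through `base64 -d` to recover it). Decoded for reference, the first of its two YAML documents is the PersistentVolumeClaim; the second is the quadlet-demo Pod holding the wordpress and envoy containers and the hostPath test volumes:

    ---
    apiVersion: v1
    kind: PersistentVolumeClaim
    metadata:
      name: wp-pv-claim
      labels:
        app: wordpress
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 20Gi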
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:16:40 -0400 (0:00:00.344) 0:01:00.437 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [
"quay.io/linux-system-roles/wordpress:4.8-apache",
"quay.io/linux-system-roles/envoyproxy:v1.25.0"
],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": [
"/tmp/httpd3",
"/tmp/httpd3-create"
]
},
"changed": false
}
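These facts are parsed out of the kube YAML just slurped: the images from each container's image field, the volumes from the hostPath entries (wp-pv-claim is a PersistentVolumeClaim, so podman manages it and nothing is pre-created on the host). A rough reconstruction, assuming a hypothetical __kube_docs variable holding the decoded YAML documents (not the role's exact expressions):

    # Hypothetical sketch of the extraction from the decoded kube documents:
    __podman_images: "{{ __kube_docs | map(attribute='spec.containers', default=[]) | flatten
                         | map(attribute='image') | list }}"
    __podman_volumes: "{{ __kube_docs | map(attribute='spec.volumes', default=[]) | flatten
                          | selectattr('hostPath', 'defined') | map(attribute='hostPath.path') | list }}"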
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:16:40 -0400 (0:00:00.131) 0:01:00.568 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:16:40 -0400 (0:00:00.038) 0:01:00.607 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state == \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:16:40 -0400 (0:00:00.031) 0:01:00.638 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:16:40 -0400 (0:00:00.068) 0:01:00.706 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:16:40 -0400 (0:00:00.053) 0:01:00.760 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:16:40 -0400 (0:00:00.031) 0:01:00.792 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:16:40 -0400 (0:00:00.031) 0:01:00.823 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:16:40 -0400 (0:00:00.030) 0:01:00.853 ***********
changed: [managed-node1] => (item=/tmp/httpd3) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
changed: [managed-node1] => (item=/tmp/httpd3-create) => {
"ansible_loop_var": "item",
"changed": true,
"gid": 0,
"group": "root",
"item": "/tmp/httpd3-create",
"mode": "0755",
"owner": "root",
"path": "/tmp/httpd3-create",
"secontext": "unconfined_u:object_r:user_tmp_t:s0",
"size": 6,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:16:41 -0400 (0:00:00.814) 0:01:01.667 ***********
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => (item=None) => {
"attempts": 1,
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:16:58 -0400 (0:00:17.256) 0:01:18.924 ***********
ok: [managed-node1] => {
"changed": false,
"gid": 0,
"group": "root",
"mode": "0755",
"owner": "root",
"path": "/etc/containers/systemd",
"secontext": "system_u:object_r:etc_t:s0",
"size": 4096,
"state": "directory",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:16:59 -0400 (0:00:00.358) 0:01:19.283 ***********
changed: [managed-node1] => {
"changed": true,
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"dest": "/etc/containers/systemd/quadlet-demo.kube",
"gid": 0,
"group": "root",
"md5sum": "da53c88f92b68b0487aa209f795b6bb3",
"mode": "0644",
"owner": "root",
"secontext": "system_u:object_r:etc_t:s0",
"size": 456,
"src": "/root/.ansible/tmp/ansible-tmp-1751933819.1617088-20790-103013089228288/source",
"state": "file",
"uid": 0
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:16:59 -0400 (0:00:00.652) 0:01:19.935 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:16:59 -0400 (0:00:00.035) 0:01:19.971 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_copy_file is skipped",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
Monday 07 July 2025 20:16:59 -0400 (0:00:00.032) 0:01:20.004 ***********
ok: [managed-node1] => {
"changed": false,
"name": null,
"status": {}
}
TASK [fedora.linux_system_roles.podman : Start service] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Monday 07 July 2025 20:17:00 -0400 (0:00:00.620) 0:01:20.625 ***********
changed: [managed-node1] => {
"changed": true,
"name": "quadlet-demo.service",
"state": "started",
"status": {
"ActiveEnterTimestampMonotonic": "0",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "inactive",
"After": "basic.target system.slice sysinit.target quadlet-demo-network.service systemd-journald.socket quadlet-demo-mysql.service -.mount",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "no",
"AssertTimestampMonotonic": "0",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "no",
"ConditionTimestampMonotonic": "0",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "0",
"ExecMainStartTimestampMonotonic": "0",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network=systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestampMonotonic": "0",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "0",
"MemoryAccounting": "yes",
"MemoryCurrent": "[not set]",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "sysinit.target system.slice -.mount quadlet-demo-mysql.service quadlet-demo-network.service",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestampMonotonic": "0",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "dead",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "[not set]",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0"
}
}
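Note FragmentPath versus SourcePath in the status above: systemd never loads the .kube file itself; podman's quadlet generator renders it into /run/systemd/generator/quadlet-demo.service during the "Reload systemctl" task above. Two ways to inspect the generated unit on the node (the generator path below is the usual one for this podman build, not taken from this log):

    systemctl cat quadlet-demo.service    # prints the generated unit from /run/systemd/generator
    /usr/libexec/podman/quadlet --dryrun  # re-renders all quadlet files and prints the result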
TASK [fedora.linux_system_roles.podman : Restart service] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131
Monday 07 July 2025 20:17:02 -0400 (0:00:01.581) 0:01:22.206 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_service_started is changed",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Cancel linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198
Monday 07 July 2025 20:17:02 -0400 (0:00:00.080) 0:01:22.287 ***********
skipping: [managed-node1] => {
"changed": false,
"skipped_reason": "No items in the list"
}
TASK [fedora.linux_system_roles.podman : Handle credential files - absent] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204
Monday 07 July 2025 20:17:02 -0400 (0:00:00.028) 0:01:22.316 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213
Monday 07 July 2025 20:17:02 -0400 (0:00:00.034) 0:01:22.350 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [Check quadlet files] *****************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96
Monday 07 July 2025 20:17:02 -0400 (0:00:00.045) 0:01:22.395 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/containers/systemd"
],
"delta": "0:00:00.004663",
"end": "2025-07-07 20:17:02.570020",
"rc": 0,
"start": "2025-07-07 20:17:02.565357"
}
STDOUT:
total 44
-rw-r--r--. 1 root root 171 Jul 7 20:11 auth_test_1_quadlet.container
drwxr-xr-x. 9 root root 4096 Jul 7 20:14 ../
-rw-r--r--. 1 root root 151 Jul 7 20:15 nopull.container
-rw-r--r--. 1 root root 138 Jul 7 20:15 bogus.container
-rw-r--r--. 1 root root 74 Jul 7 20:16 quadlet-demo.network
-rw-r--r--. 1 root root 9 Jul 7 20:16 quadlet-demo-mysql.volume
-rw-r--r--. 1 root root 363 Jul 7 20:16 quadlet-demo-mysql.container
-rw-r--r--. 1 root root 2102 Jul 7 20:16 envoy-proxy-configmap.yml
-rw-r--r--. 1 root root 1605 Jul 7 20:16 quadlet-demo.yml
-rw-r--r--. 1 root root 456 Jul 7 20:16 quadlet-demo.kube
drwxr-xr-x. 2 root root 4096 Jul 7 20:16 ./
TASK [Check containers] ********************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100
Monday 07 July 2025 20:17:02 -0400 (0:00:00.415) 0:01:22.811 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.062736",
"end": "2025-07-07 20:17:03.039714",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:02.976978"
}
STDOUT:
CONTAINER ID  IMAGE                                            COMMAND               CREATED         STATUS                   PORTS                                           NAMES
43ced9d07da7  quay.io/linux-system-roles/mysql:5.6             mysqld                31 seconds ago  Up 32 seconds (healthy)                                                  quadlet-demo-mysql
6590bf67f59b  localhost/podman-pause:4.9.4-dev-1708535009                            2 seconds ago   Up 2 seconds                                                             a96f3a51b8d1-service
cb9417aec732  localhost/podman-pause:4.9.4-dev-1708535009                            1 second ago    Up 2 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  501c88c14505-infra
895e1420c8b6  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  1 second ago    Up 2 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-wordpress
a14813358ad3  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  1 second ago    Up 2 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-envoy
TASK [Check volumes] ***********************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105
Monday 07 July 2025 20:17:03 -0400 (0:00:00.459) 0:01:23.270 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"volume",
"ls"
],
"delta": "0:00:00.040116",
"end": "2025-07-07 20:17:03.457003",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:03.416887"
}
STDOUT:
DRIVER VOLUME NAME
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-proxy-config
local envoy-certificates
TASK [Check pods] **************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110
Monday 07 July 2025 20:17:03 -0400 (0:00:00.425) 0:01:23.696 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.039183",
"end": "2025-07-07 20:17:03.858466",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:03.819283"
}
STDOUT:
POD ID        NAME          STATUS   CREATED        INFRA ID      IDS                                     NAMES                                                         STATUS
501c88c14505  quadlet-demo  Running  2 seconds ago  cb9417aec732  cb9417aec732,895e1420c8b6,a14813358ad3  501c88c14505-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115
Monday 07 July 2025 20:17:03 -0400 (0:00:00.392) 0:01:24.088 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units | grep quadlet",
"delta": "0:00:00.012776",
"end": "2025-07-07 20:17:04.228042",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:04.215266"
}
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
TASK [Check web] ***************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
Monday 07 July 2025 20:17:04 -0400 (0:00:00.369) 0:01:24.458 ***********
changed: [managed-node1] => {
"attempts": 1,
"changed": true,
"checksum_dest": null,
"checksum_src": "d1ac587ee4653b36ed40791b2bca2a83cf8cb157",
"dest": "/run/out",
"elapsed": 0,
"gid": 0,
"group": "root",
"md5sum": "95e8238992037c7b6b6decebba46e982",
"mode": "0600",
"owner": "root",
"secontext": "system_u:object_r:var_run_t:s0",
"size": 11666,
"src": "/root/.ansible/tmp/ansible-tmp-1751933824.3271575-20885-176678670070928/tmp610bj9n9",
"state": "file",
"status_code": 200,
"uid": 0,
"url": "https://localhost:8000"
}
MSG:
OK (unknown bytes)
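This fetch goes through the envoy listener the .kube file publishes as 8000:8080, so the 200 status suggests the whole chain (network, configmap, certificates, wordpress) is up. A rough shell equivalent of the get_url task (assuming certificate validation is disabled for the demo's self-signed certs, which the log does not show):

    curl -ko /run/out https://localhost:8000   # -k: the demo terminates TLS with test certificates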
TASK [Show web] ****************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:132
Monday 07 July 2025 20:17:05 -0400 (0:00:01.285) 0:01:25.743 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"cat",
"/run/out"
],
"delta": "0:00:00.002769",
"end": "2025-07-07 20:17:05.859744",
"rc": 0,
"start": "2025-07-07 20:17:05.856975"
}
STDOUT:
WordPress › Installation
WordPress
TASK [Error] *******************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:137
Monday 07 July 2025 20:17:05 -0400 (0:00:00.347) 0:01:26.091 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__web_status is failed",
"skip_reason": "Conditional result was False"
}
TASK [Check] *******************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148
Monday 07 July 2025 20:17:05 -0400 (0:00:00.031) 0:01:26.122 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"ps",
"-a"
],
"delta": "0:00:00.038730",
"end": "2025-07-07 20:17:06.270688",
"rc": 0,
"start": "2025-07-07 20:17:06.231958"
}
STDOUT:
CONTAINER ID  IMAGE                                            COMMAND               CREATED         STATUS                   PORTS                                           NAMES
43ced9d07da7  quay.io/linux-system-roles/mysql:5.6             mysqld                35 seconds ago  Up 35 seconds (healthy)                                                  quadlet-demo-mysql
6590bf67f59b  localhost/podman-pause:4.9.4-dev-1708535009                            5 seconds ago   Up 5 seconds                                                             a96f3a51b8d1-service
cb9417aec732  localhost/podman-pause:4.9.4-dev-1708535009                            5 seconds ago   Up 5 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  501c88c14505-infra
895e1420c8b6  quay.io/linux-system-roles/wordpress:4.8-apache  apache2-foregroun...  5 seconds ago   Up 5 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-wordpress
a14813358ad3  quay.io/linux-system-roles/envoyproxy:v1.25.0    envoy -c /etc/env...  5 seconds ago   Up 5 seconds             0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp  quadlet-demo-envoy
TASK [Check pods] **************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152
Monday 07 July 2025 20:17:06 -0400 (0:00:00.376) 0:01:26.499 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"pod",
"ps",
"--ctr-ids",
"--ctr-names",
"--ctr-status"
],
"delta": "0:00:00.039909",
"end": "2025-07-07 20:17:06.652544",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:06.612635"
}
STDOUT:
POD ID        NAME          STATUS   CREATED        INFRA ID      IDS                                     NAMES                                                         STATUS
501c88c14505  quadlet-demo  Running  5 seconds ago  cb9417aec732  cb9417aec732,895e1420c8b6,a14813358ad3  501c88c14505-infra,quadlet-demo-wordpress,quadlet-demo-envoy  running,running,running
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157
Monday 07 July 2025 20:17:06 -0400 (0:00:00.382) 0:01:26.882 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet",
"delta": "0:00:00.011131",
"end": "2025-07-07 20:17:07.004049",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:06.992918"
}
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
quadlet-demo.service loaded active running quadlet-demo.service
TASK [LS] **********************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165
Monday 07 July 2025 20:17:07 -0400 (0:00:00.350) 0:01:27.232 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"ls",
"-alrtF",
"/etc/systemd/system"
],
"delta": "0:00:00.003713",
"end": "2025-07-07 20:17:07.350672",
"failed_when_result": false,
"rc": 0,
"start": "2025-07-07 20:17:07.346959"
}
STDOUT:
total 8
lrwxrwxrwx. 1 root root 9 May 11 2019 systemd-timedated.service -> /dev/null
drwxr-xr-x. 4 root root 169 May 29 2024 ../
lrwxrwxrwx. 1 root root 39 May 29 2024 syslog.service -> /usr/lib/systemd/system/rsyslog.service
drwxr-xr-x. 2 root root 32 May 29 2024 getty.target.wants/
lrwxrwxrwx. 1 root root 37 May 29 2024 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target
lrwxrwxrwx. 1 root root 57 May 29 2024 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service
drwxr-xr-x. 2 root root 48 May 29 2024 network-online.target.wants/
lrwxrwxrwx. 1 root root 41 May 29 2024 dbus-org.freedesktop.timedate1.service -> /usr/lib/systemd/system/timedatex.service
drwxr-xr-x. 2 root root 61 May 29 2024 timers.target.wants/
drwxr-xr-x. 2 root root 31 May 29 2024 basic.target.wants/
drwxr-xr-x. 2 root root 38 May 29 2024 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/
lrwxrwxrwx. 1 root root 41 May 29 2024 default.target -> /usr/lib/systemd/system/multi-user.target
drwxr-xr-x. 2 root root 51 May 29 2024 sockets.target.wants/
drwxr-xr-x. 2 root root 31 May 29 2024 remote-fs.target.wants/
drwxr-xr-x. 2 root root 59 May 29 2024 sshd-keygen@.service.d/
drwxr-xr-x. 2 root root 119 May 29 2024 cloud-init.target.wants/
drwxr-xr-x. 2 root root 181 May 29 2024 sysinit.target.wants/
drwxr-xr-x. 2 root root 4096 Jul 7 20:16 multi-user.target.wants/
lrwxrwxrwx. 1 root root 41 Jul 7 20:16 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service
drwxr-xr-x. 13 root root 4096 Jul 7 20:16 ./
TASK [Cleanup] *****************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172
Monday 07 July 2025 20:17:07 -0400 (0:00:00.347) 0:01:27.579 ***********
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 07 July 2025 20:17:07 -0400 (0:00:00.123) 0:01:27.703 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 07 July 2025 20:17:07 -0400 (0:00:00.052) 0:01:27.755 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 07 July 2025 20:17:07 -0400 (0:00:00.037) 0:01:27.793 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 07 July 2025 20:17:07 -0400 (0:00:00.032) 0:01:27.825 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_ostree is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 07 July 2025 20:17:07 -0400 (0:00:00.033) 0:01:27.859 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 07 July 2025 20:17:07 -0400 (0:00:00.031) 0:01:27.891 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 07 July 2025 20:17:07 -0400 (0:00:00.032) 0:01:27.923 ***********
ok: [managed-node1] => (item=RedHat.yml) => {
"ansible_facts": {
"__podman_packages": [
"podman",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "RedHat.yml"
}
skipping: [managed-node1] => (item=CentOS.yml) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "__vars_file is file",
"item": "CentOS.yml",
"skip_reason": "Conditional result was False"
}
ok: [managed-node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
ok: [managed-node1] => (item=CentOS_8.yml) => {
"ansible_facts": {
"__podman_packages": [
"crun",
"podman",
"podman-plugins",
"shadow-utils-subid"
]
},
"ansible_included_var_files": [
"/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml"
],
"ansible_loop_var": "item",
"changed": false,
"item": "CentOS_8.yml"
}
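CentOS_8.yml is loaded twice because the role tries one vars file per name candidate, and on this host the distribution-plus-major-version and distribution-plus-full-version names both resolve to CentOS_8.yml. A sketch of the candidate list (assumed, following the usual linux-system-roles pattern, not this role's exact code):

    # Hypothetical candidate list for the vars-file loop:
    - "{{ ansible_facts['os_family'] }}.yml"                                                       # RedHat.yml
    - "{{ ansible_facts['distribution'] }}.yml"                                                    # CentOS.yml (no such file -> skipped)
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"  # CentOS_8.yml
    - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"        # CentOS_8.yml again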
TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Monday 07 July 2025 20:17:07 -0400 (0:00:00.068) 0:01:27.992 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Monday 07 July 2025 20:17:09 -0400 (0:00:01.443) 0:01:29.435 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_use_copr | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Monday 07 July 2025 20:17:09 -0400 (0:00:00.031) 0:01:29.467 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Monday 07 July 2025 20:17:09 -0400 (0:00:00.039) 0:01:29.506 ***********
skipping: [managed-node1] => {
"false_condition": "__podman_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Monday 07 July 2025 20:17:09 -0400 (0:00:00.031) 0:01:29.538 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Monday 07 July 2025 20:17:09 -0400 (0:00:00.032) 0:01:29.571 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Monday 07 July 2025 20:17:09 -0400 (0:00:00.033) 0:01:29.604 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": [
"podman",
"--version"
],
"delta": "0:00:00.029481",
"end": "2025-07-07 20:17:09.742978",
"rc": 0,
"start": "2025-07-07 20:17:09.713497"
}
STDOUT:
podman version 4.9.4-dev
TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Monday 07 July 2025 20:17:09 -0400 (0:00:00.366) 0:01:29.971 ***********
ok: [managed-node1] => {
"ansible_facts": {
"podman_version": "4.9.4-dev"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Monday 07 July 2025 20:17:09 -0400 (0:00:00.077) 0:01:30.049 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.2\", \"<\")",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Monday 07 July 2025 20:17:09 -0400 (0:00:00.030) 0:01:30.079 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_version is version(\"4.4\", \"<\")",
"skip_reason": "Conditional result was False"
}
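Both version gates use Jinja's version test, which handles the 4.9.4-dev pre-release suffix well enough for these comparisons, so neither fires here. A minimal sketch of such a gate (the failure message is illustrative):

- name: Podman package version must be 4.2 or later
  ansible.builtin.fail:
    msg: Podman version {{ podman_version }} is too old; 4.2 or later is required
  when: podman_version is version("4.2", "<")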
TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Monday 07 July 2025 20:17:09 -0400 (0:00:00.038) 0:01:30.117 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
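The second task with the same name uses meta: end_host rather than fail, which ends the play for this host without marking it failed; because the conditional evaluated to False, execution simply continues. A sketch of the pattern (the exact conditions on the real task are not visible in this log):

- name: End the play for hosts whose podman is too old for quadlet/secrets
  ansible.builtin.meta: end_host
  when: podman_version is version("4.4", "<")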
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Monday 07 July 2025 20:17:09 -0400 (0:00:00.038) 0:01:30.156 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Monday 07 July 2025 20:17:10 -0400 (0:00:00.052) 0:01:30.209 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {
"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"
}
MSG:
end_host conditional evaluated to false, continuing execution for managed-node1
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Monday 07 July 2025 20:17:10 -0400 (0:00:00.053) 0:01:30.262 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:10 -0400 (0:00:00.062) 0:01:30.324 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:10 -0400 (0:00:00.036) 0:01:30.361 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:10 -0400 (0:00:00.039) 0:01:30.401 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
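handle_user_group.yml resolves the target user via getent facts and derives the primary group from the passwd entry; for root the GID comes out as "0", as shown above. A sketch of the lookup, assuming the standard layout of ansible_facts['getent_passwd'] entries (password, UID, GID, GECOS, home, shell):

- name: Get user information
  ansible.builtin.getent:
    database: passwd
    key: "{{ __podman_user }}"
  when: >-
    'getent_passwd' not in ansible_facts
    or __podman_user not in ansible_facts['getent_passwd']

- name: Set group for podman user
  ansible.builtin.set_fact:
    __podman_group: "{{ ansible_facts['getent_passwd'][__podman_user][2] }}"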
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:10 -0400 (0:00:00.044) 0:01:30.445 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
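The large dictionary above is the normal return of the stat module; the only field the role needs later is stat.exists. A sketch of the probe:

- name: See if getsubids exists
  ansible.builtin.stat:
    path: /usr/bin/getsubids
  register: __podman_stat_getsubids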
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:10 -0400 (0:00:00.350) 0:01:30.795 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:10 -0400 (0:00:00.035) 0:01:30.830 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:10 -0400 (0:00:00.033) 0:01:30.864 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
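root (or UID "0") runs containers without user-namespace remapping, so every subuid/subgid check above is skipped. For a non-root user the role would query the ranges with getsubids; a sketch of that branch (register names hypothetical):

- name: Check with getsubids for user subuids
  ansible.builtin.command: getsubids {{ __podman_user }}
  register: __podman_register_subuids
  changed_when: false  # query only
  when: __podman_user not in ["root", "0"]

- name: Check with getsubids for user subgids
  ansible.builtin.command: getsubids -g {{ __podman_user }}
  register: __podman_register_subgids
  changed_when: false
  when: __podman_user not in ["root", "0"]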
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:10 -0400 (0:00:00.035) 0:01:30.899 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:10 -0400 (0:00:00.034) 0:01:30.934 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:10 -0400 (0:00:00.033) 0:01:30.967 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:10 -0400 (0:00:00.080) 0:01:31.047 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:10 -0400 (0:00:00.033) 0:01:31.081 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Monday 07 July 2025 20:17:10 -0400 (0:00:00.034) 0:01:31.115 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf",
"__podman_parent_mode": "0755",
"__podman_parent_path": "/etc/containers",
"__podman_policy_json_file": "/etc/containers/policy.json",
"__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf",
"__podman_storage_conf_file": "/etc/containers/storage.conf"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126
Monday 07 July 2025 20:17:10 -0400 (0:00:00.042) 0:01:31.158 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Monday 07 July 2025 20:17:11 -0400 (0:00:00.061) 0:01:31.219 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Monday 07 July 2025 20:17:11 -0400 (0:00:00.032) 0:01:31.252 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_containers_conf | length > 0",
"skip_reason": "Conditional result was False"
}
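All four config handlers (containers.conf.d, registries.conf.d, storage.conf, policy.json) follow the same shape: do nothing unless the corresponding role variable is non-empty, otherwise create the parent directory and write the 50-systemroles.conf drop-in. A sketch for the containers.conf.d case (the serialization shown is illustrative only; containers.conf is TOML and the role renders it itself):

- name: Ensure containers.d exists
  ansible.builtin.file:
    path: /etc/containers/containers.conf.d
    state: directory
    mode: "0755"
  when: podman_containers_conf | length > 0

- name: Update container config file
  ansible.builtin.copy:
    # Illustrative only: the real role renders proper TOML from the dict.
    content: "{{ podman_containers_conf | to_nice_json }}"
    dest: /etc/containers/containers.conf.d/50-systemroles.conf
    mode: "0644"
  when: podman_containers_conf | length > 0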
TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129
Monday 07 July 2025 20:17:11 -0400 (0:00:00.033) 0:01:31.285 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Monday 07 July 2025 20:17:11 -0400 (0:00:00.060) 0:01:31.346 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Monday 07 July 2025 20:17:11 -0400 (0:00:00.033) 0:01:31.379 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_registries_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132
Monday 07 July 2025 20:17:11 -0400 (0:00:00.036) 0:01:31.415 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7
Monday 07 July 2025 20:17:11 -0400 (0:00:00.062) 0:01:31.478 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15
Monday 07 July 2025 20:17:11 -0400 (0:00:00.030) 0:01:31.509 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_storage_conf | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135
Monday 07 July 2025 20:17:11 -0400 (0:00:00.033) 0:01:31.542 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8
Monday 07 July 2025 20:17:11 -0400 (0:00:00.063) 0:01:31.605 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16
Monday 07 July 2025 20:17:11 -0400 (0:00:00.032) 0:01:31.638 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21
Monday 07 July 2025 20:17:11 -0400 (0:00:00.075) 0:01:31.714 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27
Monday 07 July 2025 20:17:11 -0400 (0:00:00.033) 0:01:31.748 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_policy_json | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141
Monday 07 July 2025 20:17:11 -0400 (0:00:00.031) 0:01:31.779 ***********
TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Monday 07 July 2025 20:17:11 -0400 (0:00:00.105) 0:01:31.885 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node1
TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Monday 07 July 2025 20:17:11 -0400 (0:00:00.059) 0:01:31.945 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Monday 07 July 2025 20:17:11 -0400 (0:00:00.040) 0:01:31.985 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_ostree is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Monday 07 July 2025 20:17:11 -0400 (0:00:00.031) 0:01:32.016 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_ostree is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Monday 07 July 2025 20:17:11 -0400 (0:00:00.032) 0:01:32.049 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Monday 07 July 2025 20:17:11 -0400 (0:00:00.031) 0:01:32.080 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __firewall_is_transactional is defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Run systemctl] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34
Monday 07 July 2025 20:17:11 -0400 (0:00:00.033) 0:01:32.114 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_booted is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Require installed systemd] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:41
Monday 07 July 2025 20:17:11 -0400 (0:00:00.030) 0:01:32.145 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_booted is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:46
Monday 07 July 2025 20:17:12 -0400 (0:00:00.030) 0:01:32.175 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_booted is not defined",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
Monday 07 July 2025 20:17:12 -0400 (0:00:00.031) 0:01:32.207 ***********
ok: [managed-node1] => {
"changed": false,
"rc": 0,
"results": []
}
MSG:
Nothing to do
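rc 0 with empty results and the dnf/yum message "Nothing to do" means firewalld was already installed, so the task reports ok rather than changed. The task itself is an ordinary idempotent package install:

- name: Install firewalld
  ansible.builtin.package:
    name: firewalld
    state: present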
TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:63
Monday 07 July 2025 20:17:14 -0400 (0:00:02.861) 0:01:35.068 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_is_transactional | d(false)"
}
TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:68
Monday 07 July 2025 20:17:14 -0400 (0:00:00.032) 0:01:35.101 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:73
Monday 07 July 2025 20:17:14 -0400 (0:00:00.030) 0:01:35.132 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_is_transactional | d(false)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Monday 07 July 2025 20:17:15 -0400 (0:00:00.078) 0:01:35.210 ***********
skipping: [managed-node1] => (item=nftables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=iptables) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item=ufw) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:14
Monday 07 July 2025 20:17:15 -0400 (0:00:00.039) 0:01:35.250 ***********
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "nftables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "iptables",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall_disable_conflicting_services | bool",
"item": "ufw",
"skip_reason": "Conditional result was False",
"skipped": true
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
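This pair of tasks is a check-then-act loop: the first probes which of nftables, iptables, and ufw are enabled, and the second feeds those loop results back in to stop and disable them. Here firewall_disable_conflicting_services is false, so every item is skipped in both passes; the skipped items from the first task become the loop items of the second, which is why each item above embeds another result dict. A sketch of the pattern, with a hypothetical register name:

- name: Check which conflicting services are enabled
  ansible.builtin.command: systemctl is-enabled {{ item }}
  loop: [nftables, iptables, ufw]
  register: __firewall_conflicting
  changed_when: false
  failed_when: false  # a disabled or missing unit is not an error here
  when: firewall_disable_conflicting_services | bool

- name: Attempt to stop and disable conflicting services
  ansible.builtin.systemd:
    name: "{{ item.item }}"
    state: stopped
    enabled: false
  loop: "{{ __firewall_conflicting.results }}"
  when:
    - firewall_disable_conflicting_services | bool
    - item is not skipped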
TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:24
Monday 07 July 2025 20:17:15 -0400 (0:00:00.039) 0:01:35.290 ***********
ok: [managed-node1] => {
"changed": false,
"name": "firewalld",
"status": {
"ActiveEnterTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"ActiveEnterTimestampMonotonic": "770968102",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "dbus.service system.slice basic.target dbus.socket sysinit.target polkit.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"AssertTimestampMonotonic": "770655329",
"Before": "network-pre.target shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"ConditionTimestampMonotonic": "770655328",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target ip6tables.service ebtables.service iptables.service nftables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "68657",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"ExecMainStartTimestampMonotonic": "770656916",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"InactiveExitTimestampMonotonic": "770657101",
"InvocationID": "0c5dfd0097c740e0935e55a556a3d7ad",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "68657",
"MemoryAccounting": "yes",
"MemoryCurrent": "41590784",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"StateChangeTimestampMonotonic": "770968102",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"WatchdogTimestampMonotonic": "770968100",
"WatchdogUSec": "0"
}
}
TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30
Monday 07 July 2025 20:17:15 -0400 (0:00:00.499) 0:01:35.789 ***********
ok: [managed-node1] => {
"changed": false,
"enabled": true,
"name": "firewalld",
"state": "started",
"status": {
"ActiveEnterTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"ActiveEnterTimestampMonotonic": "770968102",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "dbus.service system.slice basic.target dbus.socket sysinit.target polkit.service",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"AssertTimestampMonotonic": "770655329",
"Before": "network-pre.target shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"BusName": "org.fedoraproject.FirewallD1",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"ConditionTimestampMonotonic": "770655328",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target ip6tables.service ebtables.service iptables.service nftables.service ipset.service",
"ControlGroup": "/system.slice/firewalld.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "firewalld - dynamic firewall daemon",
"DevicePolicy": "auto",
"Documentation": "man:firewalld(1)",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "68657",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"ExecMainStartTimestampMonotonic": "770656916",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/usr/lib/systemd/system/firewalld.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "firewalld.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"InactiveExitTimestampMonotonic": "770657101",
"InvocationID": "0c5dfd0097c740e0935e55a556a3d7ad",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "68657",
"MemoryAccounting": "yes",
"MemoryCurrent": "41590784",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "firewalld.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "dbus.socket system.slice sysinit.target",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "null",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "null",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"StateChangeTimestampMonotonic": "770968102",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "2",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "dbus",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "enabled",
"UnitFileState": "enabled",
"UtmpMode": "init",
"WantedBy": "multi-user.target",
"Wants": "network-pre.target",
"WatchdogTimestamp": "Mon 2025-07-07 20:16:05 EDT",
"WatchdogTimestampMonotonic": "770968100",
"WatchdogUSec": "0"
}
}
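Unmask followed by enable-and-start is the standard way to guarantee firewalld is runnable regardless of prior state; both tasks report ok because the unit was already unmasked, enabled, and active, which is also why the two status payloads are identical. The equivalent tasks:

- name: Unmask firewalld service
  ansible.builtin.systemd:
    name: firewalld
    masked: false

- name: Enable and start firewalld service
  ansible.builtin.systemd:
    name: firewalld
    enabled: true
    state: started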
TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:36
Monday 07 July 2025 20:17:16 -0400 (0:00:00.502) 0:01:36.291 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__firewall_previous_replaced": false,
"__firewall_python_cmd": "/usr/libexec/platform-python",
"__firewall_report_changed": true
},
"changed": false
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:45
Monday 07 July 2025 20:17:16 -0400 (0:00:00.042) 0:01:36.333 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:58
Monday 07 July 2025 20:17:16 -0400 (0:00:00.032) 0:01:36.366 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
Monday 07 July 2025 20:17:16 -0400 (0:00:00.030) 0:01:36.397 ***********
ok: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "8000/tcp",
"state": "enabled"
}
}
ok: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"__firewall_changed": false,
"ansible_loop_var": "item",
"changed": false,
"item": {
"port": "9000/tcp",
"state": "enabled"
}
}
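The two ok items show the firewall role handling 8000/tcp and 9000/tcp; __firewall_changed is false because both ports were already open from an earlier run. From the caller's side this corresponds, roughly, to invoking the role with a port list:

- name: Manage firewall for specified ports
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 8000/tcp
        state: enabled
      - port: 9000/tcp
        state: enabled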
TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:126
Monday 07 July 2025 20:17:17 -0400 (0:00:01.090) 0:01:37.487 ***********
skipping: [managed-node1] => (item={'port': '8000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "8000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => (item={'port': '9000/tcp', 'state': 'enabled'}) => {
"ansible_loop_var": "item",
"changed": false,
"false_condition": "firewall | length == 1",
"item": {
"port": "9000/tcp",
"state": "enabled"
},
"skip_reason": "Conditional result was False"
}
skipping: [managed-node1] => {
"changed": false
}
MSG:
All items skipped
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:137
Monday 07 July 2025 20:17:17 -0400 (0:00:00.048) 0:01:37.535 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall | length == 1",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:146
Monday 07 July 2025 20:17:17 -0400 (0:00:00.035) 0:01:37.570 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:152
Monday 07 July 2025 20:17:17 -0400 (0:00:00.032) 0:01:37.603 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "firewall == None or firewall | length == 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:161
Monday 07 July 2025 20:17:17 -0400 (0:00:00.033) 0:01:37.637 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:172
Monday 07 July 2025 20:17:17 -0400 (0:00:00.030) 0:01:37.667 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__firewall_previous_replaced | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:178
Monday 07 July 2025 20:17:17 -0400 (0:00:00.029) 0:01:37.697 ***********
skipping: [managed-node1] => {
"false_condition": "__firewall_previous_replaced | bool"
}
TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148
Monday 07 July 2025 20:17:17 -0400 (0:00:00.047) 0:01:37.744 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "podman_selinux_ports | length > 0",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155
Monday 07 July 2025 20:17:17 -0400 (0:00:00.033) 0:01:37.778 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_cancel_user_linger": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159
Monday 07 July 2025 20:17:17 -0400 (0:00:00.032) 0:01:37.810 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168
Monday 07 July 2025 20:17:17 -0400 (0:00:00.073) 0:01:37.884 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177
Monday 07 July 2025 20:17:17 -0400 (0:00:00.029) 0:01:37.913 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:17:17 -0400 (0:00:00.094) 0:01:38.008 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:17:17 -0400 (0:00:00.036) 0:01:38.044 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:17 -0400 (0:00:00.057) 0:01:38.102 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:17 -0400 (0:00:00.037) 0:01:38.140 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:18 -0400 (0:00:00.035) 0:01:38.175 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:18 -0400 (0:00:00.044) 0:01:38.219 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:18 -0400 (0:00:00.032) 0:01:38.251 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:18 -0400 (0:00:00.034) 0:01:38.285 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:18 -0400 (0:00:00.031) 0:01:38.317 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:18 -0400 (0:00:00.032) 0:01:38.350 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:18 -0400 (0:00:00.033) 0:01:38.383 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:18 -0400 (0:00:00.031) 0:01:38.415 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:18 -0400 (0:00:00.032) 0:01:38.447 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:18 -0400 (0:00:00.033) 0:01:38.481 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:17:18 -0400 (0:00:00.078) 0:01:38.560 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
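Rootless handling hinges on these two facts: __podman_rootless is false when the target user is root, and the XDG runtime directory is derived from the user's UID (here /run/user/0). A sketch, again assuming the getent_passwd entry layout:

- name: Set variables part 2
  ansible.builtin.set_fact:
    __podman_rootless: "{{ __podman_user != 'root' }}"
    __podman_xdg_runtime_dir: "/run/user/{{ ansible_facts['getent_passwd'][__podman_user][1] }}"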
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:17:18 -0400 (0:00:00.039) 0:01:38.600 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:17:18 -0400 (0:00:00.059) 0:01:38.659 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:17:18 -0400 (0:00:00.030) 0:01:38.690 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:17:18 -0400 (0:00:00.032) 0:01:38.722 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
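manage_linger.yml only matters for rootless users: lingering keeps the user's systemd instance (and therefore any quadlet-generated user units) running without an active login session. Since __podman_rootless is false here, all three tasks skip. A sketch of the enable step for the rootless case:

- name: Enable linger if needed
  ansible.builtin.command: loginctl enable-linger {{ __podman_user }}
  args:
    creates: /var/lib/systemd/linger/{{ __podman_user }}  # loginctl's marker file keeps this idempotent
  when: __podman_rootless | bool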
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:17:18 -0400 (0:00:00.031) 0:01:38.753 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:17:18 -0400 (0:00:00.031) 0:01:38.785 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
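The actual secret creation is censored by no_log, but changed: true tells us a secret was written on this pass. The task likely wraps containers.podman.podman_secret; a sketch with hypothetical names (the real names and data come from the vaulted test variables):

- name: Manage each secret
  containers.podman.podman_secret:
    name: example-secret                # hypothetical secret name
    data: "{{ __podman_secret_data }}"  # hypothetical variable
    state: present
  no_log: true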
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:17:19 -0400 (0:00:00.390) 0:01:39.176 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:17:19 -0400 (0:00:00.035) 0:01:39.212 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:19 -0400 (0:00:00.058) 0:01:39.270 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:19 -0400 (0:00:00.036) 0:01:39.307 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:19 -0400 (0:00:00.036) 0:01:39.343 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:19 -0400 (0:00:00.044) 0:01:39.387 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:19 -0400 (0:00:00.032) 0:01:39.420 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:19 -0400 (0:00:00.031) 0:01:39.452 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:19 -0400 (0:00:00.037) 0:01:39.489 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:19 -0400 (0:00:00.033) 0:01:39.523 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:19 -0400 (0:00:00.077) 0:01:39.601 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:19 -0400 (0:00:00.034) 0:01:39.636 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:19 -0400 (0:00:00.032) 0:01:39.669 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:19 -0400 (0:00:00.033) 0:01:39.702 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:17:19 -0400 (0:00:00.032) 0:01:39.735 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:17:19 -0400 (0:00:00.042) 0:01:39.777 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:17:19 -0400 (0:00:00.058) 0:01:39.836 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:17:19 -0400 (0:00:00.032) 0:01:39.868 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:17:19 -0400 (0:00:00.030) 0:01:39.899 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:17:19 -0400 (0:00:00.031) 0:01:39.930 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:17:19 -0400 (0:00:00.031) 0:01:39.962 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Set variables part 1] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3
Monday 07 July 2025 20:17:20 -0400 (0:00:00.384) 0:01:40.347 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7
Monday 07 July 2025 20:17:20 -0400 (0:00:00.038) 0:01:40.385 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:20 -0400 (0:00:00.057) 0:01:40.443 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:20 -0400 (0:00:00.042) 0:01:40.485 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:40.519 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:20 -0400 (0:00:00.096) 0:01:40.616 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:20 -0400 (0:00:00.033) 0:01:40.649 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:40.683 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:20 -0400 (0:00:00.033) 0:01:40.717 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:20 -0400 (0:00:00.033) 0:01:40.751 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:40.785 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:20 -0400 (0:00:00.032) 0:01:40.817 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:20 -0400 (0:00:00.034) 0:01:40.852 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:20 -0400 (0:00:00.032) 0:01:40.885 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_check_subids | d(true)",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set variables part 2] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14
Monday 07 July 2025 20:17:20 -0400 (0:00:00.033) 0:01:40.918 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_rootless": false,
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20
Monday 07 July 2025 20:17:20 -0400 (0:00:00.039) 0:01:40.958 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:17:20 -0400 (0:00:00.060) 0:01:41.018 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:17:20 -0400 (0:00:00.031) 0:01:41.050 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:17:20 -0400 (0:00:00.032) 0:01:41.082 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25
Monday 07 July 2025 20:17:20 -0400 (0:00:00.030) 0:01:41.112 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Manage each secret] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41
Monday 07 July 2025 20:17:20 -0400 (0:00:00.030) 0:01:41.143 ***********
changed: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": true
}
TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184
Monday 07 July 2025 20:17:21 -0400 (0:00:00.391) 0:01:41.535 ***********
skipping: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191
Monday 07 July 2025 20:17:21 -0400 (0:00:00.028) 0:01:41.564 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))
TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:17:21 -0400 (0:00:00.204) 0:01:41.768 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_file_src": "quadlet-demo.kube",
"__podman_quadlet_spec": {},
"__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml",
"__podman_quadlet_template_src": ""
},
"changed": false
}
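(Expanded from the escaped __podman_quadlet_str above, the quadlet-demo.kube unit reads:)

[Install]
WantedBy=default.target

[Unit]
Requires=quadlet-demo-mysql.service
After=quadlet-demo-mysql.service

[Kube]
# Point to the yaml file in the same directory
Yaml=quadlet-demo.yml
# Use the quadlet-demo network
Network=quadlet-demo.network
# Publish the envoy proxy data port
PublishPort=8000:8080
# Publish the envoy proxy admin port
PublishPort=9000:9901
# Use the envoy proxy config map in the same directory
ConfigMap=envoy-proxy-configmap.yml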
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:17:21 -0400 (0:00:00.045) 0:01:41.813 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_continue_if_pull_fails": false,
"__podman_pull_image": true,
"__podman_state": "absent",
"__podman_systemd_unit_scope": "",
"__podman_user": "root"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:17:21 -0400 (0:00:00.041) 0:01:41.855 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_quadlet_file_src",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:17:21 -0400 (0:00:00.033) 0:01:41.888 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_name": "quadlet-demo",
"__podman_quadlet_type": "kube",
"__podman_rootless": false
},
"changed": false
}
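(The name and type here follow directly from the source file name quadlet-demo.kube; a minimal sketch of that derivation — illustrative, not the role's literal code:)

- ansible.builtin.set_fact:
    # "quadlet-demo.kube" -> name "quadlet-demo"
    __podman_quadlet_name: "{{ __podman_quadlet_file_src | basename | splitext | first }}"
    # "quadlet-demo.kube" -> extension ".kube" -> type "kube"
    __podman_quadlet_type: "{{ (__podman_quadlet_file_src | basename | splitext | last)[1:] }}"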
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:17:21 -0400 (0:00:00.046) 0:01:41.935 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:17:21 -0400 (0:00:00.060) 0:01:41.995 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:17:21 -0400 (0:00:00.034) 0:01:42.030 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:17:21 -0400 (0:00:00.035) 0:01:42.066 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_group": "0"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:17:21 -0400 (0:00:00.043) 0:01:42.110 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933443.724815,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 32,
"charset": "binary",
"checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b",
"ctime": 1751933414.2563267,
"dev": 51713,
"device_type": 0,
"executable": true,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 6986653,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "application/x-sharedlib",
"mode": "0755",
"mtime": 1700557386.0,
"nlink": 1,
"path": "/usr/bin/getsubids",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 12640,
"uid": 0,
"version": "4263604762",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": true,
"xoth": true,
"xusr": true
}
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:17:22 -0400 (0:00:00.352) 0:01:42.462 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:17:22 -0400 (0:00:00.038) 0:01:42.500 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:17:22 -0400 (0:00:00.032) 0:01:42.533 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_user not in [\"root\", \"0\"]",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:17:22 -0400 (0:00:00.034) 0:01:42.567 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:17:22 -0400 (0:00:00.033) 0:01:42.601 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:17:22 -0400 (0:00:00.034) 0:01:42.635 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:17:22 -0400 (0:00:00.033) 0:01:42.669 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:17:22 -0400 (0:00:00.079) 0:01:42.749 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "not __podman_stat_getsubids.stat.exists",
"skip_reason": "Conditional result was False"
}
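(All sub-ID checks in this pass are skipped because __podman_user is root. For a non-root user the role would query the ranges with getsubids, which the stat above confirmed exists at /usr/bin/getsubids; an illustrative sketch:)

- name: Check subuid ranges (illustrative)
  ansible.builtin.command: getsubids {{ __podman_user }}
  register: __subuid_info
  changed_when: false

- name: Check subgid ranges (illustrative)
  ansible.builtin.command: getsubids -g {{ __podman_user }}
  register: __subgid_info
  changed_when: false

# typical getsubids output: "0: <user> 100000 65536" (range start and count)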
TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:17:22 -0400 (0:00:00.034) 0:01:42.783 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_activate_systemd_unit": true,
"__podman_images_found": [],
"__podman_kube_yamls_raw": [
"quadlet-demo.yml"
],
"__podman_service_name": "quadlet-demo.service",
"__podman_systemd_scope": "system",
"__podman_user_home_dir": "/root",
"__podman_xdg_runtime_dir": "/run/user/0"
},
"changed": false
}
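(For a kube quadlet, podman's systemd generator turns quadlet-demo.kube into quadlet-demo.service — see the FragmentPath under /run/systemd/generator in the status dump further below. As a hedged aside, one way to inspect the generated unit by hand:)

- name: Show the generated unit (illustrative, not part of the role)
  ansible.builtin.command: systemctl cat quadlet-demo.service
  changed_when: false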
TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:17:22 -0400 (0:00:00.054) 0:01:42.838 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_quadlet_path": "/etc/containers/systemd"
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:17:22 -0400 (0:00:00.037) 0:01:42.875 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_state != \"absent\"",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:17:22 -0400 (0:00:00.031) 0:01:42.907 ***********
ok: [managed-node1] => {
"ansible_facts": {
"__podman_images": [],
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube",
"__podman_volumes": []
},
"changed": false
}
TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:17:22 -0400 (0:00:00.076) 0:01:42.983 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:17:22 -0400 (0:00:00.039) 0:01:43.023 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] *****************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4
Monday 07 July 2025 20:17:22 -0400 (0:00:00.076) 0:01:43.100 ***********
skipping: [managed-node1] => {
"changed": false,
"false_condition": "__podman_rootless | bool",
"skip_reason": "Conditional result was False"
}
TASK [fedora.linux_system_roles.podman : Stop and disable service] *************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Monday 07 July 2025 20:17:22 -0400 (0:00:00.031) 0:01:43.131 ***********
changed: [managed-node1] => {
"changed": true,
"enabled": false,
"failed_when_result": false,
"name": "quadlet-demo.service",
"state": "stopped",
"status": {
"ActiveEnterTimestamp": "Mon 2025-07-07 20:17:01 EDT",
"ActiveEnterTimestampMonotonic": "826935208",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "systemd-journald.socket sysinit.target quadlet-demo-mysql.service quadlet-demo-network.service basic.target system.slice -.mount",
"AllowIsolate": "no",
"AllowedCPUs": "",
"AllowedMemoryNodes": "",
"AmbientCapabilities": "",
"AssertResult": "yes",
"AssertTimestamp": "Mon 2025-07-07 20:17:00 EDT",
"AssertTimestampMonotonic": "825912718",
"Before": "shutdown.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "[not set]",
"CPUAccounting": "no",
"CPUAffinity": "",
"CPUAffinityFromNUMA": "no",
"CPUQuotaPerSecUSec": "infinity",
"CPUQuotaPeriodUSec": "infinity",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "[not set]",
"CPUUsageNSec": "[not set]",
"CPUWeight": "[not set]",
"CacheDirectoryMode": "0755",
"CanFreeze": "yes",
"CanIsolate": "no",
"CanReload": "no",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf",
"CollectMode": "inactive",
"ConditionResult": "yes",
"ConditionTimestamp": "Mon 2025-07-07 20:17:00 EDT",
"ConditionTimestampMonotonic": "825912717",
"ConfigurationDirectoryMode": "0755",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/quadlet-demo.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"DefaultMemoryLow": "0",
"DefaultMemoryMin": "0",
"Delegate": "no",
"Description": "quadlet-demo.service",
"DevicePolicy": "auto",
"DynamicUser": "no",
"EffectiveCPUs": "",
"EffectiveMemoryNodes": "",
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "75317",
"ExecMainStartTimestamp": "Mon 2025-07-07 20:17:01 EDT",
"ExecMainStartTimestampMonotonic": "826935186",
"ExecMainStatus": "0",
"ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network=systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Mon 2025-07-07 20:17:00 EDT] ; stop_time=[n/a] ; pid=75258 ; code=(null) ; status=0/0 }",
"ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FailureAction": "none",
"FileDescriptorStoreMax": "0",
"FragmentPath": "/run/systemd/generator/quadlet-demo.service",
"FreezerState": "running",
"GID": "[not set]",
"GuessMainPID": "yes",
"IOAccounting": "no",
"IOSchedulingClass": "0",
"IOSchedulingPriority": "0",
"IOWeight": "[not set]",
"IPAccounting": "no",
"IPEgressBytes": "18446744073709551615",
"IPEgressPackets": "18446744073709551615",
"IPIngressBytes": "18446744073709551615",
"IPIngressPackets": "18446744073709551615",
"Id": "quadlet-demo.service",
"IgnoreOnIsolate": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Mon 2025-07-07 20:17:00 EDT",
"InactiveExitTimestampMonotonic": "825914111",
"InvocationID": "809648cfbec24b3b8a6633b4df8af6e6",
"JobRunningTimeoutUSec": "infinity",
"JobTimeoutAction": "none",
"JobTimeoutUSec": "infinity",
"KeyringMode": "private",
"KillMode": "mixed",
"KillSignal": "15",
"LimitAS": "infinity",
"LimitASSoft": "infinity",
"LimitCORE": "infinity",
"LimitCORESoft": "0",
"LimitCPU": "infinity",
"LimitCPUSoft": "infinity",
"LimitDATA": "infinity",
"LimitDATASoft": "infinity",
"LimitFSIZE": "infinity",
"LimitFSIZESoft": "infinity",
"LimitLOCKS": "infinity",
"LimitLOCKSSoft": "infinity",
"LimitMEMLOCK": "65536",
"LimitMEMLOCKSoft": "65536",
"LimitMSGQUEUE": "819200",
"LimitMSGQUEUESoft": "819200",
"LimitNICE": "0",
"LimitNICESoft": "0",
"LimitNOFILE": "262144",
"LimitNOFILESoft": "1024",
"LimitNPROC": "14003",
"LimitNPROCSoft": "14003",
"LimitRSS": "infinity",
"LimitRSSSoft": "infinity",
"LimitRTPRIO": "0",
"LimitRTPRIOSoft": "0",
"LimitRTTIME": "infinity",
"LimitRTTIMESoft": "infinity",
"LimitSIGPENDING": "14003",
"LimitSIGPENDINGSoft": "14003",
"LimitSTACK": "infinity",
"LimitSTACKSoft": "8388608",
"LoadState": "loaded",
"LockPersonality": "no",
"LogLevelMax": "-1",
"LogRateLimitBurst": "0",
"LogRateLimitIntervalUSec": "0",
"LogsDirectoryMode": "0755",
"MainPID": "75317",
"MemoryAccounting": "yes",
"MemoryCurrent": "5271552",
"MemoryDenyWriteExecute": "no",
"MemoryHigh": "infinity",
"MemoryLimit": "infinity",
"MemoryLow": "0",
"MemoryMax": "infinity",
"MemoryMin": "0",
"MemorySwapMax": "infinity",
"MountAPIVFS": "no",
"MountFlags": "",
"NFileDescriptorStore": "0",
"NRestarts": "0",
"NUMAMask": "",
"NUMAPolicy": "n/a",
"Names": "quadlet-demo.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "all",
"OOMScoreAdjust": "0",
"OnFailureJobMode": "replace",
"PermissionsStartOnly": "no",
"Perpetual": "no",
"PrivateDevices": "no",
"PrivateMounts": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"PrivateUsers": "no",
"ProtectControlGroups": "no",
"ProtectHome": "no",
"ProtectKernelModules": "no",
"ProtectKernelTunables": "no",
"ProtectSystem": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"RemoveIPC": "no",
"Requires": "-.mount system.slice quadlet-demo-network.service sysinit.target quadlet-demo-mysql.service",
"RequiresMountsFor": "/run/containers",
"Restart": "no",
"RestartUSec": "100ms",
"RestrictNamespaces": "no",
"RestrictRealtime": "no",
"RestrictSUIDSGID": "no",
"Result": "success",
"RootDirectoryStartOnly": "no",
"RuntimeDirectoryMode": "0755",
"RuntimeDirectoryPreserve": "no",
"RuntimeMaxUSec": "infinity",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"SourcePath": "/etc/containers/systemd/quadlet-demo.kube",
"StandardError": "inherit",
"StandardInput": "null",
"StandardInputData": "",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitIntervalUSec": "10s",
"StartupBlockIOWeight": "[not set]",
"StartupCPUShares": "[not set]",
"StartupCPUWeight": "[not set]",
"StartupIOWeight": "[not set]",
"StateChangeTimestamp": "Mon 2025-07-07 20:17:01 EDT",
"StateChangeTimestampMonotonic": "826935208",
"StateDirectoryMode": "0755",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SuccessAction": "none",
"SyslogFacility": "3",
"SyslogIdentifier": "quadlet-demo",
"SyslogLevel": "6",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"SystemCallErrorNumber": "0",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TasksAccounting": "yes",
"TasksCurrent": "4",
"TasksMax": "22405",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "notify",
"UID": "[not set]",
"UMask": "0022",
"UnitFilePreset": "disabled",
"UnitFileState": "generated",
"UtmpMode": "init",
"WatchdogTimestamp": "Mon 2025-07-07 20:17:01 EDT",
"WatchdogTimestampMonotonic": "826935205",
"WatchdogUSec": "0"
}
}
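(Judging from the "enabled", "state", and "failed_when_result" keys above, this step drives the systemd module roughly as in the sketch below. The failed_when here is a simplification of whatever expression the role actually uses; it is shown only to explain why a missing unit would not abort cleanup:)

- name: Stop and disable quadlet-demo.service (illustrative sketch)
  ansible.builtin.systemd:
    name: quadlet-demo.service
    state: stopped
    enabled: false
  register: __stop_result
  failed_when: false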
TASK [fedora.linux_system_roles.podman : See if quadlet file exists] ***********
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34
Monday 07 July 2025 20:17:24 -0400 (0:00:01.271) 0:01:44.402 ***********
ok: [managed-node1] => {
"changed": false,
"stat": {
"atime": 1751933820.2896745,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 8,
"charset": "us-ascii",
"checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7",
"ctime": 1751933819.7006657,
"dev": 51713,
"device_type": 0,
"executable": false,
"exists": true,
"gid": 0,
"gr_name": "root",
"inode": 18874916,
"isblk": false,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": true,
"issock": false,
"isuid": false,
"mimetype": "text/plain",
"mode": "0644",
"mtime": 1751933819.4476619,
"nlink": 1,
"path": "/etc/containers/systemd/quadlet-demo.kube",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": true,
"rusr": true,
"size": 456,
"uid": 0,
"version": "3005850106",
"wgrp": false,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:39
Monday 07 July 2025 20:17:24 -0400 (0:00:00.358) 0:01:44.761 ***********
included: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1
TASK [fedora.linux_system_roles.podman : Slurp quadlet file] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Monday 07 July 2025 20:17:24 -0400 (0:00:00.058) 0:01:44.819 ***********
ok: [managed-node1] => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"changed": false
}
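(The slurp result is censored by no_log; for reference, slurp returns the file body base64-encoded, roughly as in this sketch — illustrative, not the role's literal tasks:)

- name: Slurp the quadlet file (illustrative)
  ansible.builtin.slurp:
    src: /etc/containers/systemd/quadlet-demo.kube
  register: __quadlet_raw

- name: Decode the base64 content (illustrative)
  ansible.builtin.set_fact:
    __quadlet_text: "{{ __quadlet_raw.content | b64decode }}"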
TASK [fedora.linux_system_roles.podman : Parse quadlet file] *******************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12
Monday 07 July 2025 20:17:24 -0400 (0:00:00.336) 0:01:45.156 ***********
fatal: [managed-node1]: FAILED! => {
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
}
TASK [Debug] *******************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:207
Monday 07 July 2025 20:17:25 -0400 (0:00:00.038) 0:01:45.194 ***********
ok: [managed-node1] => {
"changed": false,
"cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n",
"delta": "0:00:00.404416",
"end": "2025-07-07 20:17:25.707506",
"rc": 0,
"start": "2025-07-07 20:17:25.303090"
}
STDERR:
+ set -o pipefail
+ systemctl list-units --plain -l --all
+ grep quadlet
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
+ systemctl list-unit-files --all
+ grep quadlet
auth_test_1_quadlet.service generated
quadlet-demo-mysql-volume.service generated
quadlet-demo-mysql.service generated
quadlet-demo-network.service generated
quadlet-demo.service generated
+ systemctl list-units --plain --failed -l --all
+ grep quadlet
+ :
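(Expanded from its escaped form in the "cmd" field above, the Debug task's script is:)

exec 1>&2
set -x
set -o pipefail
systemctl list-units --plain -l --all | grep quadlet || :
systemctl list-unit-files --all | grep quadlet || :
systemctl list-units --plain --failed -l --all | grep quadlet || :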
TASK [Get journald] ************************************************************
task path: /tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:217
Monday 07 July 2025 20:17:25 -0400 (0:00:00.742) 0:01:45.936 ***********
fatal: [managed-node1]: FAILED! => {
"changed": false,
"cmd": [
"journalctl",
"-ex"
],
"delta": "0:00:00.032797",
"end": "2025-07-07 20:17:26.077693",
"failed_when_result": true,
"rc": 0,
"start": "2025-07-07 20:17:26.044896"
}
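(Note that failed_when_result is true even though rc is 0: the test appears to force this task to fail so the journal gets dumped after the earlier parse failure. The command and its flags:)

journalctl -ex
# -e: jump to the end of the journal
# -x: append catalog explanations (the "-- Subject:" / "-- Defined-By:" blocks in the STDOUT below)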
STDOUT:
-- Logs begin at Mon 2025-07-07 20:03:15 EDT, end at Mon 2025-07-07 20:17:26 EDT. --
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:10 managed-node1 platform-python[27013]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_g1udakmi_podman/auth/auth.json follow=True get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 30ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:12 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
[Duplicate journal entries elided: the Started/Succeeded/Consumed cycle shown above for libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope repeats through Jul 07 20:12:13, each run consuming roughly 31-40ms CPU time.]
Jul 07 20:12:14 managed-node1 platform-python[27758]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
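The command task logged above is the role probing the installed podman version. On the node it amounts to the following (the awk extraction is illustrative, not necessarily how the role parses the output):

  # Print the client version string, e.g. "podman version 4.9.4"
  podman --version
  # Pull out just the version number (third field of that output)
  podman --version | awk '{print $3}'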
[Duplicate journal entries elided: the same scope cycle repeats from Jul 07 20:12:14 through 20:12:15, each run consuming roughly 30-40ms CPU time.]
Jul 07 20:12:15 managed-node1 platform-python[28085]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
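The stat of /usr/bin/getsubids above is how the role detects whether the shadow-utils subordinate-ID helper is present; when it is, subuid/subgid ranges for a user can be queried as sketched below (the user name is a placeholder for illustration):

  # Query subordinate IDs only if the helper exists
  if [ -x /usr/bin/getsubids ]; then
      getsubids some_user      # subordinate UID ranges for the user
      getsubids -g some_user   # -g selects subordinate GID ranges instead
  fi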
[Duplicate journal entries elided: the same scope cycle repeats from Jul 07 20:12:15 through 20:12:17, each run consuming roughly 31-35ms CPU time.]
Jul 07 20:12:17 managed-node1 platform-python[28545]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
[Duplicate journal entries elided: the same scope cycle repeats three more times at Jul 07 20:12:17, each run consuming 33ms CPU time.]
Jul 07 20:12:17 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:17 managed-node1 platform-python[28795]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
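The ansible-file task above ensures root's per-user containers configuration directory exists with the requested ownership and mode. A plain-shell sketch of the same end state (an illustration of the module arguments' effect, not the module's implementation):

  # path=/root/.config/containers state=directory owner=root group=0 mode=0700
  mkdir -p /root/.config/containers
  chown root:0 /root/.config/containers
  chmod 0700 /root/.config/containers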
Jul 07 20:12:17 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:17 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
[Duplicate journal entries elided: the same scope cycle repeats from Jul 07 20:12:18 through 20:12:20, each run consuming roughly 31-40ms CPU time.]
Jul 07 20:12:20 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:20 managed-node1 platform-python[29527]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:20 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:20 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
[Duplicate journal entries elided: the same scope cycle repeats from Jul 07 20:12:20 through 20:12:21, each run consuming roughly 30-37ms CPU time.]
Jul 07 20:12:21 managed-node1 platform-python[29851]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
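The systemd-escape call above derives the instance name for the podman-kube@.service template from the kube YAML path. Under systemd's escaping rules '/' maps to '-' and a literal '-' to '\x2d', so the expected result (inferred from those rules; the command's stdout is not shown in this journal) is:

  systemd-escape --template podman-kube@.service \
      /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
  # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service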
Jul 07 20:12:21 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:21 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:21 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:21 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:21 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:21 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:21 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:21 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:21 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:22 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:22 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:22 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time
Jul 07 20:12:22 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:22 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:22 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time
Jul 07 20:12:22 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:22 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:22 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time
Jul 07 20:12:22 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
Jul 07 20:12:23 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:23 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:23 managed-node1 platform-python[30442]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
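The ansible-stat invocation above probes whether the kube file has already been deployed before any copy is attempted. A minimal sketch of the kind of task that would produce this call (the task name and register variable are illustrative, not taken from the role):

    - name: Check for an existing kube file
      ansible.builtin.stat:
        path: /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
      register: __kube_file_stat   # hypothetical variable name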
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
Jul 07 20:12:23 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time
Jul 07 20:12:23 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:23 managed-node1 platform-python[30638]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
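The ansible-file invocation above ensures the target directory for kube specs exists with the logged ownership and mode. An equivalent task, as a sketch mirroring the logged arguments:

    - name: Ensure the kube spec directory exists
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d
        state: directory
        owner: root
        group: "0"
        mode: "0755"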
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
Jul 07 20:12:24 managed-node1 platform-python[30832]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
Jul 07 20:12:24 managed-node1 platform-python[30948]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _original_basename=tmp_vaq3m9x recurse=False state=file path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
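The ansible.legacy.stat / ansible.legacy.file pair above is the checksum-and-attributes follow-up that ansible.builtin.copy (or template) performs after transferring a file; the tmp_vaq3m9x basename is the transfer temp file. A sketch of the copy task behind it (the src value is hypothetical, everything else mirrors the logged arguments):

    - name: Deploy the kube play file
      ansible.builtin.copy:
        src: auth_test_1_kube.yml   # hypothetical source file name
        dest: /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
        owner: root
        group: "0"
        mode: "0644"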
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:25 managed-node1 platform-python[31147]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
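The podman_play invocation above runs the freshly deployed kube file through podman kube play. Stripped of its many None defaults, a minimal equivalent task would look like this sketch; the machine-libpod_pod_... slice created just below is the cgroup slice systemd allocates for the resulting pod:

    - name: Start the kube workload
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
        state: started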
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 42ms CPU time
Jul 07 20:12:25 managed-node1 systemd[1]: Created slice cgroup machine-libpod_pod_2cf8954f7399d3e7be6ffde9df97791abef0102d3bf1a2db611ca545eba8c723.slice.
-- Subject: Unit machine-libpod_pod_2cf8954f7399d3e7be6ffde9df97791abef0102d3bf1a2db611ca545eba8c723.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_2cf8954f7399d3e7be6ffde9df97791abef0102d3bf1a2db611ca545eba8c723.slice has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time
Jul 07 20:12:26 managed-node1 platform-python[31516]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
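The stat on /usr/bin/getsubids probes for the shadow-utils helper; when it is present, the role can query subordinate UID/GID ranges through it rather than parsing /etc/subuid and /etc/subgid directly. A sketch of such a check (the register name is illustrative):

    - name: Check for the getsubids helper
      ansible.builtin.stat:
        path: /usr/bin/getsubids
      register: __getsubids_stat   # hypothetical variable name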
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 30ms CPU time
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time
Jul 07 20:12:29 managed-node1 platform-python[32231]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
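This ansible-file invocation creates /etc/containers/systemd, the system-wide drop-in directory that quadlet scans for unit definitions. As a sketch mirroring the logged arguments:

    - name: Ensure the quadlet unit directory exists
      ansible.builtin.file:
        path: /etc/containers/systemd
        state: directory
        owner: root
        group: "0"
        mode: "0755"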
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
Jul 07 20:12:30 managed-node1 platform-python[32459]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
Jul 07 20:12:30 managed-node1 platform-python[32558]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/systemd/auth_test_1_quadlet.container _original_basename=systemd.j2 recurse=False state=file path=/etc/containers/systemd/auth_test_1_quadlet.container force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
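The _original_basename=systemd.j2 above shows the quadlet .container unit being rendered from a Jinja2 template; the legacy.file call is the attribute fix-up that follows the transfer. A sketch of the template task behind it (the src value comes from the logged basename, everything else mirrors the logged arguments):

    - name: Render the quadlet container unit
      ansible.builtin.template:
        src: systemd.j2
        dest: /etc/containers/systemd/auth_test_1_quadlet.container
        owner: root
        group: "0"
        mode: "0644"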
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
Jul 07 20:12:31 managed-node1 platform-python[32893]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 42ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:35 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:35 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:35 managed-node1 platform-python[33742]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:12:35 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time
Jul 07 20:12:35 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:35 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:36 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:36 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
Jul 07 20:12:36 managed-node1 platform-python[34048]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:36 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:36 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:36 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:37 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:37 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:37 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:37 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:37 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:37 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 30ms CPU time
Jul 07 20:12:37 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:38 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:38 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:38 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
Jul 07 20:12:38 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:38 managed-node1 platform-python[34578]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:38 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
Jul 07 20:12:39 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:39 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:39 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:39 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:39 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:39 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:40 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
Jul 07 20:12:40 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:40 managed-node1 platform-python[34951]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time
Jul 07 20:12:40 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:40 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:40 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:40 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:41 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time
Jul 07 20:12:41 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
Jul 07 20:12:41 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
Jul 07 20:12:41 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
Jul 07 20:12:41 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
Jul 07 20:12:41 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
Jul 07 20:12:42 managed-node1 platform-python[35361]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:42 managed-node1 platform-python[35648]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
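
The systemd-escape call above shows how the role derives the templated unit's instance name from the kube YAML path: "/" becomes "-" and literal dashes are hex-escaped, which is why the unit later appears in the journal as podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service. A minimal Python sketch of the same derivation (assuming systemd-escape is installed, as on any systemd host):

    import subprocess

    def kube_unit_name(kube_file):
        # Mirrors the command in the log entry above; systemd-escape does
        # the escaping ("/" -> "-", "-" -> "\x2d") and substitutes the
        # result into the podman-kube@.service template name.
        return subprocess.run(
            ["systemd-escape", "--template", "podman-kube@.service", kube_file],
            check=True, capture_output=True, text=True,
        ).stdout.strip()

    # kube_unit_name("/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml")
    # -> "podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service"
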
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 30ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:43 managed-node1 platform-python[35861]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.
-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:12:43 managed-node1 systemd[1]: Reloading.
Jul 07 20:12:43 managed-node1 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun shutting down.
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope completed and consumed the indicated resources.
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.
Jul 07 20:12:44 managed-node1 kernel: cni-podman1: port 1(vethd00911db) entered disabled state
Jul 07 20:12:44 managed-node1 kernel: device vethd00911db left promiscuous mode
Jul 07 20:12:44 managed-node1 kernel: cni-podman1: port 1(vethd00911db) entered disabled state
Jul 07 20:12:44 managed-node1 systemd[1]: run-netns-netns\x2d31539c74\x2d0dd9\x2d75de\x2d2758\x2d32dd9b11acd9.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d31539c74\x2d0dd9\x2d75de\x2d2758\x2d32dd9b11acd9.mount has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-8ab929d1d2d0c54129da2724994df9d3ecf5a006deddf7227971135d72c5c9b7-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-8ab929d1d2d0c54129da2724994df9d3ecf5a006deddf7227971135d72c5c9b7-merged.mount has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice.
-- Subject: Unit machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice has finished shutting down.
Jul 07 20:12:44 managed-node1 systemd[1]: machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice: Consumed 8.855s CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice completed and consumed the indicated resources.
Jul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-e92981e35d0e3220d4f6a642c4ebd01c543f98e36791b4fd0dad2ba868c2b8b2-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-e92981e35d0e3220d4f6a642c4ebd01c543f98e36791b4fd0dad2ba868c2b8b2-merged.mount has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope: Consumed 34ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope completed and consumed the indicated resources.
Jul 07 20:12:44 managed-node1 podman[35948]: Pods stopped:
Jul 07 20:12:44 managed-node1 podman[35948]: 32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2
Jul 07 20:12:44 managed-node1 podman[35948]: Pods removed:
Jul 07 20:12:44 managed-node1 podman[35948]: 32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2
Jul 07 20:12:44 managed-node1 podman[35948]: Secrets removed:
Jul 07 20:12:44 managed-node1 podman[35948]: Volumes removed:
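
The summary above is printed by podman while the podman-kube@ unit stops: the pod is stopped and removed, and any secrets and volumes it created are cleaned up. The stop side of the unit is roughly equivalent to the following sketch (podman CLI assumed; newer podman releases also spell this "podman kube down"):

    import subprocess

    def kube_down(kube_file):
        # Stop and remove the pod that "podman play kube" created from
        # kube_file; this is what produces the "Pods stopped/removed"
        # summary seen in the journal.
        subprocess.run(["podman", "play", "kube", "--down", kube_file], check=True)
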
Jul 07 20:12:44 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.
-- Subject: Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished shutting down.
Jul 07 20:12:44 managed-node1 platform-python[36205]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-6ff63d58f28e47f3ff7d69686fa0fa0d8b689b5f23097fa79213e92ebd3c1a82-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-6ff63d58f28e47f3ff7d69686fa0fa0d8b689b5f23097fa79213e92ebd3c1a82-merged.mount has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:12:45 managed-node1 platform-python[36330]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Jul 07 20:12:45 managed-node1 platform-python[36330]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
Jul 07 20:12:45 managed-node1 platform-python[36466]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:12:46 managed-node1 platform-python[36589]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:47 managed-node1 platform-python[36714]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:12:48 managed-node1 systemd[1]: Reloading.
Jul 07 20:12:48 managed-node1 platform-python[36869]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:49 managed-node1 platform-python[37117]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:12:51 managed-node1 platform-python[37366]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:12:52 managed-node1 platform-python[37495]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:55 managed-node1 platform-python[37620]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:57 managed-node1 platform-python[37745]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:58 managed-node1 platform-python[37870]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:59 managed-node1 platform-python[37995]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:00 managed-node1 platform-python[38119]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None
Jul 07 20:13:00 managed-node1 platform-python[38246]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:01 managed-node1 platform-python[38369]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:02 managed-node1 platform-python[38492]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:04 managed-node1 platform-python[38617]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:13:04 managed-node1 systemd[1]: Reloading.
Jul 07 20:13:04 managed-node1 platform-python[38772]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:05 managed-node1 platform-python[39020]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:08 managed-node1 platform-python[39269]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:09 managed-node1 platform-python[39398]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None
Jul 07 20:13:09 managed-node1 platform-python[39522]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:10 managed-node1 platform-python[39646]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Jul 07 20:13:10 managed-node1 platform-python[39772]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:11 managed-node1 platform-python[39895]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type "volume"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
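
The --format argument in the podman inspect call above is a Go template that walks each container's Mounts and prints the Name of every volume-type mount, i.e. the named volumes backing podman_registry. The same lookup can be done by parsing podman's JSON output instead of templating, as in this sketch (podman CLI assumed):

    import json
    import subprocess

    def volume_names(container):
        # "podman inspect" without --format emits a JSON array with one
        # object per container; volume mounts have Type == "volume".
        out = subprocess.run(
            ["podman", "inspect", container],
            check=True, capture_output=True, text=True,
        ).stdout
        return [m["Name"]
                for c in json.loads(out)
                for m in c.get("Mounts", [])
                if m.get("Type") == "volume"]

    # volume_names("podman_registry") -> the names later passed to
    # "podman volume rm" below
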
Jul 07 20:13:11 managed-node1 platform-python[40026]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:12 managed-node1 systemd[1]: libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope has successfully entered the 'dead' state.
Jul 07 20:13:12 managed-node1 systemd[1]: libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope: Consumed 286ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope completed and consumed the indicated resources.
Jul 07 20:13:12 managed-node1 kernel: cni-podman0: port 1(veth68fe629e) entered disabled state
Jul 07 20:13:12 managed-node1 kernel: device veth68fe629e left promiscuous mode
Jul 07 20:13:12 managed-node1 kernel: cni-podman0: port 1(veth68fe629e) entered disabled state
Jul 07 20:13:12 managed-node1 systemd[1]: run-netns-netns\x2d9a2a84ae\x2dd2b7\x2d43e4\x2de6fc\x2d789ee7470be2.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d9a2a84ae\x2dd2b7\x2d43e4\x2de6fc\x2d789ee7470be2.mount has successfully entered the 'dead' state.
Jul 07 20:13:12 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:13:12 managed-node1 systemd[1]: var-lib-containers-storage-overlay-9932edc33cbafb6e7ecfae77d0fb277900da93d8d1700e844f2e7a339147077c-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-9932edc33cbafb6e7ecfae77d0fb277900da93d8d1700e844f2e7a339147077c-merged.mount has successfully entered the 'dead' state.
Jul 07 20:13:12 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:13:12 managed-node1 systemd[1]: libpod-conmon-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope has successfully entered the 'dead' state.
Jul 07 20:13:12 managed-node1 platform-python[40256]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 3d862ddec4300f01d16f48f9bc70e1ab9060957c31772608c131707fa7e9530a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:12 managed-node1 platform-python[40386]: ansible-file Invoked with path=/tmp/lsr_g1udakmi_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:15 managed-node1 platform-python[40548]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:13:15 managed-node1 platform-python[40708]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:16 managed-node1 platform-python[40831]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:18 managed-node1 platform-python[41079]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:19 managed-node1 platform-python[41208]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:13:19 managed-node1 platform-python[41332]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:21 managed-node1 platform-python[41457]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None
Jul 07 20:13:22 managed-node1 platform-python[41580]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_eh60794g/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
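
The tar command above snapshots the role-managed container configuration before the test mutates it: -P keeps absolute path names and --ignore-failed-read skips any listed file that does not exist yet, so the archive can be restored verbatim afterwards. A comparable sketch with Python's tarfile module, tolerating missing paths the same way:

    import os
    import tarfile

    def backup(paths, archive):
        # Like "tar --ignore-failed-read -c -P -f archive paths...":
        # store absolute names as given and silently skip missing files.
        with tarfile.open(archive, "w") as tar:
            for p in paths:
                if os.path.exists(p):
                    tar.add(p)

    # backup(["/etc/containers/storage.conf", "/etc/containers/policy.json"],
    #        "/tmp/lsr_podman_config_backup.tar")   # hypothetical archive path
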
Jul 07 20:13:22 managed-node1 platform-python[41704]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Jul 07 20:13:22 managed-node1 useradd[41709]: new group: name=user1, GID=1000
Jul 07 20:13:22 managed-node1 useradd[41709]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash
Jul 07 20:13:25 managed-node1 platform-python[41962]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:26 managed-node1 platform-python[42091]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None
Jul 07 20:13:26 managed-node1 platform-python[42215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:26 managed-node1 platform-python[42340]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:27 managed-node1 platform-python[42464]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
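
The getsubids calls above (together with the earlier stat of /usr/bin/getsubids) verify that user1 owns subordinate UID and GID ranges, which rootless podman needs to set up user namespaces. On hosts without the getsubids binary the same ranges can be read straight from /etc/subuid and /etc/subgid; a minimal sketch for the local-files case:

    def subid_ranges(user, path="/etc/subuid"):
        # Each line is "name:start:count"; a user may own several ranges.
        # Covers only local files, not the SSSD/FreeIPA sources that
        # getsubids can also query.
        ranges = []
        with open(path) as f:
            for line in f:
                if not line.strip():
                    continue
                name, start, count = line.strip().split(":")
                if name == user:
                    ranges.append((int(start), int(count)))
        return ranges

    # subid_ranges("user1") -> e.g. [(100000, 65536)]
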
Jul 07 20:13:28 managed-node1 platform-python[42588]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:28 managed-node1 platform-python[42711]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:29 managed-node1 platform-python[42810]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933608.4442887-13818-203828107249767/source _original_basename=tmp2im13war follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:29 managed-node1 platform-python[42935]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:29 managed-node1 platform-python[43058]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:30 managed-node1 platform-python[43157]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933609.6925502-13875-87176783089164/source _original_basename=tmpedt3m65n follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:30 managed-node1 platform-python[43282]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:31 managed-node1 platform-python[43405]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:31 managed-node1 platform-python[43504]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933610.9488542-13923-231091939259467/source _original_basename=tmpecztpwnu follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:31 managed-node1 platform-python[43629]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:32 managed-node1 platform-python[43752]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:32 managed-node1 platform-python[43875]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:33 managed-node1 platform-python[43974]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933612.5160282-13996-72093044894160/source _original_basename=tmpfiqb05ty follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:33 managed-node1 platform-python[44099]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:34 managed-node1 platform-python[44224]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:34 managed-node1 platform-python[44348]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:35 managed-node1 platform-python[44472]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:36 managed-node1 platform-python[44819]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:37 managed-node1 platform-python[44944]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:37 managed-node1 platform-python[45068]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:38 managed-node1 platform-python[45192]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:38 managed-node1 platform-python[45317]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:39 managed-node1 platform-python[45442]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:39 managed-node1 platform-python[45567]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:39 managed-node1 platform-python[45692]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:42 managed-node1 platform-python[45941]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:43 managed-node1 platform-python[46070]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:44 managed-node1 platform-python[46195]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:44 managed-node1 platform-python[46319]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:45 managed-node1 platform-python[46443]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:45 managed-node1 platform-python[46566]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:45 managed-node1 platform-python[46630]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=tmpv6awi9kz recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:46 managed-node1 platform-python[46753]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:46 managed-node1 platform-python[46876]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:46 managed-node1 platform-python[46940]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=tmpld1z9971 recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:47 managed-node1 platform-python[47063]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:47 managed-node1 platform-python[47186]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:13:48 managed-node1 platform-python[47250]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=tmpogtggds4 recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:48 managed-node1 platform-python[47373]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:48 managed-node1 platform-python[47496]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:49 managed-node1 platform-python[47621]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json
Jul 07 20:13:50 managed-node1 platform-python[47744]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:50 managed-node1 platform-python[47869]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:50 managed-node1 platform-python[47993]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:51 managed-node1 platform-python[48117]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:13:53 managed-node1 platform-python[48427]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:53 managed-node1 platform-python[48552]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:54 managed-node1 platform-python[48676]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:55 managed-node1 platform-python[48800]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:55 managed-node1 platform-python[48925]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:55 managed-node1 platform-python[49050]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:56 managed-node1 platform-python[49175]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:13:58 managed-node1 platform-python[49425]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:13:59 managed-node1 platform-python[49554]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:13:59 managed-node1 platform-python[49678]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:00 managed-node1 platform-python[49803]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:01 managed-node1 platform-python[49926]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:01 managed-node1 platform-python[50025]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933640.7999432-15017-174714386022238/source _original_basename=tmpwmkwc7_r follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:01 managed-node1 platform-python[50150]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:02 managed-node1 platform-python[50273]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:02 managed-node1 platform-python[50372]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933641.9483154-15075-274864138293397/source _original_basename=tmpuqrumlre follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:02 managed-node1 platform-python[50497]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:03 managed-node1 platform-python[50620]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:03 managed-node1 platform-python[50721]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933643.0880578-15126-78971180479869/source _original_basename=tmp124ngalx follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:04 managed-node1 platform-python[50846]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:04 managed-node1 platform-python[50969]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:04 managed-node1 platform-python[51094]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jul 07 20:14:05 managed-node1 platform-python[51217]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:05 managed-node1 platform-python[51318]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933644.9727504-15206-80436667009190/source _original_basename=tmp5mucblfd follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
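The file/stat/copy triplets above are the journal's view of ansible.builtin.copy deploying each containers config file. A minimal sketch of such a task, with path, owner, and mode taken from the log (the file content itself is masked as content=NOT_LOGGING_PARAMETER, so the content variable here is an assumption):

  - name: Deploy a containers config drop-in (illustrative sketch)
    ansible.builtin.copy:
      content: "{{ containers_conf_content }}"  # hypothetical variable; real content is masked in the log
      dest: /etc/containers/containers.conf.d/50-systemroles.conf
      owner: root
      mode: "0644"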
Jul 07 20:14:06 managed-node1 platform-python[51443]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
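The stat of /usr/bin/getsubids recurs throughout this run; the role probes for the getsubids binary before dealing with subuid/subgid ranges for rootless users. A sketch of that probe (the register name is an assumption):

  - name: Check whether the getsubids binary is available (sketch)
    ansible.builtin.stat:
      path: /usr/bin/getsubids
    register: __stat_getsubids  # hypothetical name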
Jul 07 20:14:07 managed-node1 platform-python[51568]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:09 managed-node1 platform-python[51917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:10 managed-node1 platform-python[52042]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:10 managed-node1 platform-python[52167]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:11 managed-node1 platform-python[52292]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:11 managed-node1 platform-python[52417]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:14 managed-node1 platform-python[52667]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:15 managed-node1 platform-python[52795]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:15 managed-node1 platform-python[52920]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:16 managed-node1 platform-python[53043]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:16 managed-node1 platform-python[53107]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=tmpxlpxk23w recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:17 managed-node1 platform-python[53230]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:17 managed-node1 platform-python[53353]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:17 managed-node1 platform-python[53417]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=tmpggptjz7u recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:18 managed-node1 platform-python[53540]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:18 managed-node1 platform-python[53663]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:19 managed-node1 platform-python[53727]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=tmpplg2p99p recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:19 managed-node1 platform-python[53850]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:19 managed-node1 platform-python[53973]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:20 managed-node1 platform-python[54098]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jul 07 20:14:21 managed-node1 platform-python[54221]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:21 managed-node1 platform-python[54346]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:23 managed-node1 platform-python[54656]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:24 managed-node1 platform-python[54781]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:24 managed-node1 platform-python[54906]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:25 managed-node1 platform-python[55031]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:25 managed-node1 platform-python[55156]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:25 managed-node1 platform-python[55281]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:26 managed-node1 platform-python[55404]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf
Jul 07 20:14:26 managed-node1 platform-python[55527]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf
Jul 07 20:14:27 managed-node1 platform-python[55650]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:27 managed-node1 platform-python[55773]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf
Jul 07 20:14:28 managed-node1 platform-python[55896]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf
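The run of ansible-slurp calls above reads each deployed file back for verification. slurp returns base64-encoded content, so a verifying task would decode before checking; a minimal sketch under that assumption:

  - name: Read back a deployed config (sketch)
    ansible.builtin.slurp:
      path: /etc/containers/storage.conf
    register: __slurped  # hypothetical name

  - name: Verify the file round-tripped non-empty (assumed check)
    ansible.builtin.assert:
      that:
        - (__slurped.content | b64decode) | length > 0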
Jul 07 20:14:31 managed-node1 platform-python[56144]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:32 managed-node1 platform-python[56273]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:32 managed-node1 platform-python[56398]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:33 managed-node1 platform-python[56521]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:33 managed-node1 platform-python[56622]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933673.0533648-16059-112177277073811/source _original_basename=tmpkc2w6i8h follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:34 managed-node1 platform-python[56747]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:34 managed-node1 platform-python[56870]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:34 managed-node1 platform-python[56934]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=tmpmv6x1kbl recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:35 managed-node1 platform-python[57057]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:35 managed-node1 platform-python[57180]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:36 managed-node1 platform-python[57244]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=tmpq0t44buq recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:36 managed-node1 platform-python[57367]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:36 managed-node1 platform-python[57490]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:37 managed-node1 platform-python[57615]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jul 07 20:14:38 managed-node1 platform-python[57738]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:38 managed-node1 platform-python[57861]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:38 managed-node1 platform-python[57984]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 platform-python[58107]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 platform-python[58230]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 platform-python[58353]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:40 managed-node1 platform-python[58476]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:40 managed-node1 platform-python[58599]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:40 managed-node1 platform-python[58722]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:41 managed-node1 platform-python[58845]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:41 managed-node1 platform-python[58968]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
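The ten state=absent calls above tear down every file the test created, for both root and user1. The same effect in one looped task (paths copied from the log):

  - name: Remove test-created podman configuration (sketch)
    ansible.builtin.file:
      path: "{{ item }}"
      state: absent
    loop:
      - /etc/containers/containers.conf.d/50-systemroles.conf
      - /etc/containers/registries.conf.d/50-systemroles.conf
      - /etc/containers/storage.conf
      - /etc/containers/policy.json
      - /home/user1/.config/containers/containers.conf.d/50-systemroles.conf
      - /home/user1/.config/containers/registries.conf.d/50-systemroles.conf
      - /home/user1/.config/containers/storage.conf
      - /home/user1/.config/containers/policy.json
      - /root/.config/containers/auth.json
      - /home/user1/.config/containers/auth.json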
Jul 07 20:14:41 managed-node1 platform-python[59091]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_eh60794g/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:42 managed-node1 platform-python[59215]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_eh60794g recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
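The restore step unpacks a backup taken before the test (tar's P flag preserves absolute paths, p preserves permissions) and then removes the scratch directory. As tasks, using the exact command from the log:

  - name: Restore pre-test configuration from the backup tarball
    ansible.builtin.command: tar xfvpP /tmp/lsr_podman_config_eh60794g/backup.tar

  - name: Remove the backup scratch directory
    ansible.builtin.file:
      path: /tmp/lsr_podman_config_eh60794g
      state: absent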
Jul 07 20:14:44 managed-node1 platform-python[59377]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:14:44 managed-node1 platform-python[59504]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:45 managed-node1 platform-python[59627]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:47 managed-node1 platform-python[59875]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:48 managed-node1 platform-python[60004]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:14:49 managed-node1 platform-python[60128]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:52 managed-node1 platform-python[60292]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:14:55 managed-node1 platform-python[60452]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:55 managed-node1 platform-python[60575]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:58 managed-node1 platform-python[60823]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:59 managed-node1 platform-python[60952]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:14:59 managed-node1 platform-python[61076]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:04 managed-node1 platform-python[61240]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:15:05 managed-node1 platform-python[61400]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:05 managed-node1 platform-python[61523]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:08 managed-node1 platform-python[61771]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:09 managed-node1 platform-python[61900]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:15:10 managed-node1 platform-python[62024]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:12 managed-node1 platform-python[62149]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:13 managed-node1 platform-python[62274]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:14 managed-node1 platform-python[62397]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:15:14 managed-node1 platform-python[62496]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933714.1632168-17986-2851343888714/source dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
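nopull.container is a quadlet unit dropped into /etc/containers/systemd/. Its content is masked in the log (content=NOT_LOGGING_PARAMETER), but a quadlet container unit generally has this shape (illustrative only; every value below is an assumption):

  [Unit]
  Description=Example quadlet container

  [Container]
  Image=quay.io/libpod/testimage:20210610  # hypothetical image reference

  [Install]
  WantedBy=default.target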
Jul 07 20:15:17 managed-node1 platform-python[62746]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:18 managed-node1 platform-python[62875]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:20 managed-node1 platform-python[63000]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
Jul 07 20:15:23 managed-node1 platform-python[63263]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:24 managed-node1 platform-python[63386]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:15:24 managed-node1 platform-python[63485]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933724.0105758-18331-153209654949797/source dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:27 managed-node1 platform-python[63735]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:28 managed-node1 platform-python[63864]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:30 managed-node1 platform-python[63989]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:32 managed-node1 platform-python[64114]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:15:32 managed-node1 platform-python[64240]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:33 managed-node1 platform-python[64488]: ansible-ansible.legacy.command Invoked with _raw_params=set -x
set -o pipefail
exec 1>&2
#podman volume rm --all
#podman network prune -f
podman volume ls
podman network ls
podman secret ls
podman container ls
podman pod ls
podman images
systemctl list-units | grep quadlet
_uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:33 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:33 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
Jul 07 20:15:36 managed-node1 platform-python[64807]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:37 managed-node1 platform-python[64936]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None
Jul 07 20:15:37 managed-node1 platform-python[65060]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:40 managed-node1 platform-python[65223]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:15:41 managed-node1 platform-python[65383]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:42 managed-node1 platform-python[65506]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:42 managed-node1 platform-python[65630]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 07 20:15:45 managed-node1 platform-python[65754]: ansible-ansible.legacy.dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 07 20:15:48 managed-node1 platform-python[65878]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:49 managed-node1 platform-python[66001]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:49 managed-node1 platform-python[66124]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:15:50 managed-node1 platform-python[66251]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
# Ansible managed
#
# system_role:certificate
booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
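Reassembling the certificate_request invocation above into role form, the playbook step was presumably equivalent to (parameters taken from the log):

  - name: Request a self-signed certificate for the demo
    ansible.builtin.include_role:
      name: fedora.linux_system_roles.certificate
    vars:
      certificate_requests:
        - name: quadlet_demo
          dns:
            - localhost
          ca: self-sign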
Jul 07 20:15:50 managed-node1 certmonger[12419]: 2025-07-07 20:15:50 [12419] Wrote to /var/lib/certmonger/requests/20250708001550
Jul 07 20:15:50 managed-node1 certmonger[12419]: (last message repeated 26 times)
Jul 07 20:15:50 managed-node1 certmonger[66267]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
Jul 07 20:15:50 managed-node1 certmonger[12419]: 2025-07-07 20:15:50 [12419] Wrote to /var/lib/certmonger/requests/20250708001550
Jul 07 20:15:51 managed-node1 platform-python[66389]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jul 07 20:15:51 managed-node1 platform-python[66512]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Jul 07 20:15:52 managed-node1 platform-python[66635]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jul 07 20:15:52 managed-node1 platform-python[66758]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:52 managed-node1 certmonger[12419]: 2025-07-07 20:15:52 [12419] Wrote to /var/lib/certmonger/requests/20250708001550
Jul 07 20:15:53 managed-node1 platform-python[66882]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:53 managed-node1 platform-python[67005]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:53 managed-node1 platform-python[67128]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
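Certificate cleanup pairs a getcert stop-tracking with removal of the issued files, exactly as logged; as tasks:

  - name: Stop certmonger tracking of the test certificate
    ansible.builtin.command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt

  - name: Remove the certificate and key
    ansible.builtin.file:
      path: "{{ item }}"
      state: absent
    loop:
      - /etc/pki/tls/certs/quadlet_demo.crt
      - /etc/pki/tls/private/quadlet_demo.key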
Jul 07 20:15:54 managed-node1 platform-python[67251]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:54 managed-node1 platform-python[67374]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:57 managed-node1 platform-python[67622]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:58 managed-node1 platform-python[67751]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:15:58 managed-node1 platform-python[67875]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:00 managed-node1 platform-python[68000]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:00 managed-node1 platform-python[68123]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:01 managed-node1 platform-python[68246]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:16:01 managed-node1 platform-python[68370]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 07 20:16:04 managed-node1 platform-python[68494]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jul 07 20:16:05 managed-node1 platform-python[68621]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:16:05 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:05 managed-node1 systemd[1]: Starting firewalld - dynamic firewall daemon...
-- Subject: Unit firewalld.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit firewalld.service has begun starting up.
Jul 07 20:16:05 managed-node1 systemd[1]: Started firewalld - dynamic firewall daemon.
-- Subject: Unit firewalld.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit firewalld.service has finished starting up.
--
-- The start-up result is done.
Jul 07 20:16:06 managed-node1 firewalld[68657]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now.
Jul 07 20:16:06 managed-node1 platform-python[68836]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jul 07 20:16:07 managed-node1 platform-python[68959]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
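The two firewall_lib calls open the demo's ports both permanently and in the runtime configuration. Via the firewall role's interface, that corresponds to roughly:

  - name: Open the quadlet demo ports
    ansible.builtin.include_role:
      name: fedora.linux_system_roles.firewall
    vars:
      firewall:
        - port: 8000/tcp
          state: enabled
        - port: 9000/tcp
          state: enabled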
Jul 07 20:16:07 managed-node1 rsyslogd[1044]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]
Jul 07 20:16:10 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:11 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
Jul 07 20:16:12 managed-node1 platform-python[69533]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:13 managed-node1 platform-python[69658]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:14 managed-node1 platform-python[69781]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:14 managed-node1 platform-python[69880]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933773.8054082-20225-128599699437087/source dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:15 managed-node1 platform-python[70005]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:15 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:15 managed-node1 quadlet-generator[68632]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:15 managed-node1 platform-python[70158]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:15 managed-node1 systemd[1]: Starting quadlet-demo-network.service...
-- Subject: Unit quadlet-demo-network.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-network.service has begun starting up.
Jul 07 20:16:15 managed-node1 quadlet-demo-network[70165]: systemd-quadlet-demo
Jul 07 20:16:15 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:15 managed-node1 systemd[1]: Started quadlet-demo-network.service.
-- Subject: Unit quadlet-demo-network.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-network.service has finished starting up.
--
-- The start-up result is done.
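Each quadlet file follows the same deployment rhythm visible above: copy the unit, daemon-reload so the quadlet generator renders it into a systemd service, then start that service. As tasks (unit name from the log):

  - name: Re-run generators so quadlet renders the new unit
    ansible.builtin.systemd:
      daemon_reload: true

  - name: Start the generated network service
    ansible.builtin.systemd:
      name: quadlet-demo-network.service
      state: started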
Jul 07 20:16:16 managed-node1 platform-python[70321]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:17 managed-node1 platform-python[70446]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:18 managed-node1 platform-python[70569]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:18 managed-node1 platform-python[70668]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933777.8190536-20316-75804711088380/source dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:18 managed-node1 platform-python[70793]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:18 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:18 managed-node1 quadlet-generator[70014]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:19 managed-node1 platform-python[70946]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:19 managed-node1 systemd[1]: Starting quadlet-demo-mysql-volume.service...
-- Subject: Unit quadlet-demo-mysql-volume.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql-volume.service has begun starting up.
Jul 07 20:16:19 managed-node1 quadlet-demo-mysql-volume[70953]: systemd-quadlet-demo-mysql
Jul 07 20:16:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:19 managed-node1 systemd[1]: Started quadlet-demo-mysql-volume.service.
-- Subject: Unit quadlet-demo-mysql-volume.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql-volume.service has finished starting up.
--
-- The start-up result is done.
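quadlet-demo-mysql-volume printed "systemd-quadlet-demo-mysql", which reflects quadlet's default naming: the volume gets the unit basename with a systemd- prefix. A minimal .volume unit needs no keys at all; a sketch (the deployed content is masked in the log):

  [Volume]
  # no keys required; the quadlet generator names the volume
  # systemd-quadlet-demo-mysql, matching the output logged above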
Jul 07 20:16:20 managed-node1 platform-python[71083]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:21 managed-node1 platform-python[71208]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:28 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:28 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
Jul 07 20:16:29 managed-node1 platform-python[71547]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:29 managed-node1 platform-python[71670]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:29 managed-node1 platform-python[71769]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933789.2744262-20446-266850841937438/source _original_basename=tmp5n7_bm4y follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:30 managed-node1 platform-python[71894]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:30 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:30 managed-node1 quadlet-generator[70802]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:30 managed-node1 platform-python[72047]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:30 managed-node1 systemd[1]: Starting quadlet-demo-mysql.service...
-- Subject: Unit quadlet-demo-mysql.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql.service has begun starting up.
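The pair of systemd invocations above is the core quadlet flow: the daemon-reload runs the quadlet generator, which turns the .container file into a quadlet-demo-mysql.service unit, and only then can the service be started. A minimal sketch of the equivalent tasks (task names are assumptions):

    - name: Reload systemd so quadlet regenerates units
      ansible.builtin.systemd:
        daemon_reload: true
        scope: system

    - name: Start the generated MySQL service
      ansible.builtin.systemd:
        name: quadlet-demo-mysql.service
        scope: system
        state: started

Note that quadlet-demo-mysql.service never exists as a unit file on disk; it is produced by the generator at reload time, which is why the reload must precede the start.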
Jul 07 20:16:31 managed-node1 quadlet-generator[71903]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:31 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): vethf6faf8d5: link is not ready
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered blocking state
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered disabled state
Jul 07 20:16:31 managed-node1 kernel: device vethf6faf8d5 entered promiscuous mode
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered blocking state
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered forwarding state
Jul 07 20:16:31 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethf6faf8d5: link becomes ready
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1014] manager: (cni-podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/8)
Jul 07 20:16:31 managed-node1 systemd-udevd[72123]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1024] manager: (vethf6faf8d5): new Veth device (/org/freedesktop/NetworkManager/Devices/9)
Jul 07 20:16:31 managed-node1 systemd-udevd[72122]: Using default interface naming scheme 'rhel-8.0'.
Jul 07 20:16:31 managed-node1 systemd-udevd[72122]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1148] device (vethf6faf8d5): carrier: link connected
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1150] device (cni-podman2): carrier: link connected
Jul 07 20:16:31 managed-node1 systemd-udevd[72123]: Could not generate persistent MAC address for vethf6faf8d5: No such file or directory
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1268] device (cni-podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1277] device (cni-podman2): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1283] device (cni-podman2): Activation: starting connection 'cni-podman2' (167ec242-14e6-424d-8b23-53c994e9c6df)
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1284] device (cni-podman2): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1286] device (cni-podman2): state change: prepare -> config (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1288] device (cni-podman2): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 dbus-daemon[614]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=682 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1290] device (cni-podman2): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has begun starting up.
Jul 07 20:16:31 managed-node1 dbus-daemon[614]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Jul 07 20:16:31 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1747] device (cni-podman2): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1749] device (cni-podman2): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1754] device (cni-podman2): Activation: successful, device activated.
Jul 07 20:16:31 managed-node1 dnsmasq[72191]: listening on cni-podman2(#8): 192.168.30.1
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: started, version 2.79 cachesize 150
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using local addresses only for domain dns.podman
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: reading /etc/resolv.conf
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using local addresses only for domain dns.podman
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using nameserver 10.29.169.13#53
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using nameserver 10.29.170.12#53
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using nameserver 10.2.32.1#53
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 1 addresses
Jul 07 20:16:31 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.
-- Subject: Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.timer has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.timer has finished starting up.
--
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 systemd[1]: Started quadlet-demo-mysql.service.
-- Subject: Unit quadlet-demo-mysql.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo-mysql.service has finished starting up.
--
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.
-- Subject: Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished starting up.
--
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[72054]: 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64
Jul 07 20:16:31 managed-node1 systemd[1]: 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has successfully entered the 'dead' state.
Jul 07 20:16:32 managed-node1 dnsmasq[72207]: listening on cni-podman2(#8): fe80::10f8:1dff:feca:224%cni-podman2
Jul 07 20:16:32 managed-node1 platform-python[72438]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:33 managed-node1 platform-python[72590]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:33 managed-node1 platform-python[72713]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:34 managed-node1 platform-python[72812]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933793.6082838-20536-246540776008881/source dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:34 managed-node1 platform-python[72955]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:34 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:35 managed-node1 platform-python[73114]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:36 managed-node1 platform-python[73267]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:37 managed-node1 platform-python[73390]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:37 managed-node1 platform-python[73498]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933797.1321883-20621-109965661389418/source _original_basename=tmpklejpicd follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:38 managed-node1 platform-python[73623]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:38 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:38 managed-node1 quadlet-generator[72970]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:39 managed-node1 platform-python[73783]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:40 managed-node1 platform-python[73908]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Jul 07 20:16:40 managed-node1 platform-python[74031]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:41 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 07 20:16:41 managed-node1 platform-python[74156]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:59 managed-node1 platform-python[74751]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:59 managed-node1 platform-python[74874]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:59 managed-node1 platform-python[74973]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933819.1617088-20790-103013089228288/source dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
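The quadlet-demo.kube unit copied above ties the service to the Kubernetes manifest and ConfigMap installed earlier. The log identifies the file only by checksum, so the body in this sketch is an assumed plausible version:

    - name: Install the quadlet-demo kube unit  # hypothetical task name
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.kube
        owner: root
        group: "0"
        mode: "0644"
        content: |
          [Kube]
          # File names follow the copies logged above; the port mapping is a guess
          Yaml=quadlet-demo.yml
          ConfigMap=envoy-proxy-configmap.yml
          PublishPort=8000:8080

          [Install]
          WantedBy=default.target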
Jul 07 20:17:00 managed-node1 platform-python[75098]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:17:00 managed-node1 systemd[1]: Reloading.
Jul 07 20:17:00 managed-node1 quadlet-generator[73632]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:17:00 managed-node1 platform-python[75251]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:17:00 managed-node1 systemd[1]: Starting quadlet-demo.service...
-- Subject: Unit quadlet-demo.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has begun starting up.
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Pods stopped:
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Pods removed:
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Secrets removed:
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Volumes removed:
Jul 07 20:17:01 managed-node1 systemd[1]: Created slice cgroup machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice.
-- Subject: Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished starting up.
--
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container 6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.
-- Subject: Unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 NetworkManager[682]: [1751933821.2795] manager: (veth0d2aa8bf): new Veth device (/org/freedesktop/NetworkManager/Devices/10)
Jul 07 20:17:01 managed-node1 systemd-udevd[75344]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 07 20:17:01 managed-node1 systemd-udevd[75344]: Could not generate persistent MAC address for veth0d2aa8bf: No such file or directory
Jul 07 20:17:01 managed-node1 quadlet-generator[75107]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): veth0d2aa8bf: link is not ready
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered blocking state
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered disabled state
Jul 07 20:17:01 managed-node1 kernel: device veth0d2aa8bf entered promiscuous mode
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth0d2aa8bf: link becomes ready
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered blocking state
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered forwarding state
Jul 07 20:17:01 managed-node1 NetworkManager[682]: [1751933821.3000] device (veth0d2aa8bf): carrier: link connected
Jul 07 20:17:01 managed-node1 dnsmasq[72207]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 2 addresses
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.
-- Subject: Unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container 895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.
-- Subject: Unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.
-- Subject: Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished starting up.
--
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.
-- Subject: Unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope has finished starting up.
--
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: Volumes:
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: wp-pv-claim
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: Pod:
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: 501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: Containers:
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: 895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6
Jul 07 20:17:01 managed-node1 systemd[1]: Started quadlet-demo.service.
-- Subject: Unit quadlet-demo.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has finished starting up.
--
-- The start-up result is done.
Jul 07 20:17:02 managed-node1 systemd[1]: 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has successfully entered the 'dead' state.
Jul 07 20:17:02 managed-node1 platform-python[75742]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:02 managed-node1 platform-python[75934]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:03 managed-node1 platform-python[76092]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:03 managed-node1 platform-python[76230]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:04 managed-node1 platform-python[76361]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:04 managed-node1 platform-python[76487]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
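The get_url call above is the end-to-end check: fetch the deployed app over HTTPS on the published port and store the body in /run/out for the cat that follows. Written as a task, mirroring the logged arguments:

    - name: Verify the demo app answers on port 8000
      ansible.builtin.get_url:
        url: https://localhost:8000
        dest: /run/out
        mode: "0600"
        validate_certs: false  # disabled in the logged invocation
        timeout: 10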
Jul 07 20:17:05 managed-node1 platform-python[76614]: ansible-ansible.legacy.command Invoked with _raw_params=cat /run/out _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:06 managed-node1 platform-python[76738]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:06 managed-node1 platform-python[76870]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:06 managed-node1 platform-python[77001]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:07 managed-node1 platform-python[77127]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:09 managed-node1 platform-python[77376]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:10 managed-node1 platform-python[77505]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:17:12 managed-node1 platform-python[77630]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 07 20:17:15 managed-node1 platform-python[77754]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jul 07 20:17:16 managed-node1 platform-python[77881]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:17:16 managed-node1 platform-python[78008]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jul 07 20:17:17 managed-node1 platform-python[78131]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
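The two firewall_lib invocations above open the demo's published ports, both permanently and at runtime. Driven through the firewall role, the variables would look roughly like this sketch (the shape mirrors the logged module arguments):

    - name: Open the demo ports
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
            permanent: true
            runtime: true
          - port: 9000/tcp
            state: enabled
            permanent: true
            runtime: true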
Jul 07 20:17:22 managed-node1 platform-python[78644]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:17:23 managed-node1 platform-python[78769]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
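Teardown starts here: the service is stopped, disabled, and force-handled in a single call before its unit file is removed. The equivalent task, mirroring the logged arguments:

    - name: Stop and disable the quadlet service
      ansible.builtin.systemd:
        name: quadlet-demo.service
        scope: system
        state: stopped
        enabled: false
        force: true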
Jul 07 20:17:23 managed-node1 systemd[1]: Reloading.
Jul 07 20:17:23 managed-node1 systemd[1]: Stopping quadlet-demo.service...
-- Subject: Unit quadlet-demo.service has begun shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has begun shutting down.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope: Consumed 36ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope completed and consumed the indicated resources.
Jul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-cc262e6d970681dd648fc289e4ce567fcb386d591ba9938e73ee237bbb02d14e-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-cc262e6d970681dd648fc289e4ce567fcb386d591ba9938e73ee237bbb02d14e-merged.mount has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope: Consumed 32ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope completed and consumed the indicated resources.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope: Consumed 128ms CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope completed and consumed the indicated resources.
Jul 07 20:17:23 managed-node1 dnsmasq[72207]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 1 addresses
Jul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-c29144925da4bc55c018f04bef611bd79cddabc79432ac8a4a5bac02cf32c71d-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-c29144925da4bc55c018f04bef611bd79cddabc79432ac8a4a5bac02cf32c71d-merged.mount has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope: Consumed 1.097s CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope completed and consumed the indicated resources.
Jul 07 20:17:23 managed-node1 quadlet-generator[78780]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which is not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:17:23 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered disabled state
Jul 07 20:17:23 managed-node1 kernel: device veth0d2aa8bf left promiscuous mode
Jul 07 20:17:23 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered disabled state
Jul 07 20:17:23 managed-node1 systemd[1]: run-netns-netns\x2d21744e05\x2dcfe3\x2df219\x2d733e\x2d9e275b4aa4fe.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2d21744e05\x2dcfe3\x2df219\x2d733e\x2d9e275b4aa4fe.mount has successfully entered the 'dead' state.
Jul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-4cc4f3ebd45dd82a955fee4fc7ba69575eb6e40399169da0568ef4a19728535d-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-4cc4f3ebd45dd82a955fee4fc7ba69575eb6e40399169da0568ef4a19728535d-merged.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice.
-- Subject: Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished shutting down.
Jul 07 20:17:24 managed-node1 systemd[1]: machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice: Consumed 1.258s CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice completed and consumed the indicated resources.
Jul 07 20:17:24 managed-node1 quadlet-demo[78819]: Pods stopped:
Jul 07 20:17:24 managed-node1 quadlet-demo[78819]: 501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372
Jul 07 20:17:24 managed-node1 quadlet-demo[78819]: Pods removed:
Jul 07 20:17:24 managed-node1 quadlet-demo[78819]: 501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372
Jul 07 20:17:24 managed-node1 quadlet-demo[78819]: Secrets removed:
Jul 07 20:17:24 managed-node1 quadlet-demo[78819]: Volumes removed:
Jul 07 20:17:24 managed-node1 systemd[1]: quadlet-demo.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit quadlet-demo.service has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 systemd[1]: Stopped quadlet-demo.service.
-- Subject: Unit quadlet-demo.service has finished shutting down
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit quadlet-demo.service has finished shutting down.
Jul 07 20:17:24 managed-node1 platform-python[79125]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay-2e7d397c3505d1186cb0eec8515ded4aa51320c6484af2e71fdb9a647a72ccec-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-2e7d397c3505d1186cb0eec8515ded4aa51320c6484af2e71fdb9a647a72ccec-merged.mount has successfully entered the 'dead' state.
Jul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:17:25 managed-node1 platform-python[79373]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2
set -x
set -o pipefail
systemctl list-units --plain -l --all | grep quadlet || :
systemctl list-unit-files --all | grep quadlet || :
systemctl list-units --plain --failed -l --all | grep quadlet || :
_uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
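For reference, the multi-line _raw_params above corresponds to a shell task like the following; exec 1>&2 routes all output to stderr, and the || : suffixes keep grep misses from failing the task:

    - name: Dump quadlet-related units for debugging
      ansible.builtin.shell: |
        exec 1>&2
        set -x
        set -o pipefail
        systemctl list-units --plain -l --all | grep quadlet || :
        systemctl list-unit-files --all | grep quadlet || :
        systemctl list-units --plain --failed -l --all | grep quadlet || :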
Jul 07 20:17:26 managed-node1 platform-python[79503]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
PLAY RECAP *********************************************************************
managed-node1 : ok=249 changed=26 unreachable=0 failed=1 skipped=342 rescued=1 ignored=0
SYSTEM ROLES ERRORS BEGIN v1
[
{
"ansible_version": "2.16.14",
"end_time": "2025-07-08T00:17:25.008997+00:00Z",
"host": "managed-node1",
"message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
"start_time": "2025-07-08T00:17:24.989880+00:00Z",
"task_name": "Parse quadlet file",
"task_path": "/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12"
},
{
"ansible_version": "2.16.14",
"delta": "0:00:00.032797",
"end_time": "2025-07-07 20:17:26.077693",
"host": "managed-node1",
"message": "",
"rc": 0,
"start_time": "2025-07-07 20:17:26.044896",
"stdout": "-- Logs begin at Mon 2025-07-07 20:03:15 EDT, end at Mon 2025-07-07 20:17:26 EDT. --\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- 
Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:10 managed-node1 platform-python[27013]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_g1udakmi_podman/auth/auth.json follow=True get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:10 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:10 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:11 managed-node1 systemd[1]: 
libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 30ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:11 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: 
systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:11 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:12 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:12 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:12 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has 
finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:12 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:12 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:13 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:13 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit 
libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:13 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:13 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:13 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
Jul 07 20:12:14 managed-node1 platform-python[27758]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
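The podman --version probe above is how the role discovers the Podman version before deciding which features to use; everything from _uses_shell onward is just the command module's defaults echoed into syslog. A minimal sketch of an equivalent task (the task and register names are illustrative, not taken from the role):

    - name: Get podman version
      ansible.builtin.command: podman --version
      register: __podman_version_output  # illustrative name, not the role's
      changed_when: false  # read-only probe should never report a change

Setting changed_when: false keeps a pure query from marking the host as changed on every run.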
Jul 07 20:12:15 managed-node1 platform-python[28085]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
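The stat call on /usr/bin/getsubids checks whether the shadow-utils helper for querying subordinate ID ranges is installed, presumably so the role can choose between calling getsubids and parsing /etc/subuid when validating rootless users. A hedged sketch of that pattern (names and the follow-on command are illustrative assumptions, not the role's tasks):

    - name: Check if the getsubids helper exists
      ansible.builtin.stat:
        path: /usr/bin/getsubids
      register: __getsubids_stat  # illustrative name

    - name: List subuid ranges via getsubids (illustrative)
      ansible.builtin.command: getsubids root
      when: __getsubids_stat.stat.exists
      changed_when: false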
Jul 07 20:12:17 managed-node1 platform-python[28545]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:17 managed-node1 platform-python[28795]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
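The ansible-file entry creates the per-user containers configuration directory with owner-only permissions; the long run of None-valued parameters is again just module defaults. A sketch of the equivalent task, reconstructed from the logged arguments:

    - name: Ensure the containers config directory exists
      ansible.builtin.file:
        path: /root/.config/containers
        state: directory
        owner: root
        group: "0"
        mode: "0700"  # owner-only, as logged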
Jul 07 20:12:20 managed-node1 platform-python[29527]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:21 managed-node1 platform-python[29851]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
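The systemd-escape call above derives the instance name for the templated podman-kube@.service unit from the kube YAML path. In systemd's default escaping, '/' maps to '-' and a literal '-' is escaped as \x2d, so this path should yield podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service. A sketch of the same derivation as a task (register name illustrative):

    - name: Compute the podman-kube unit name for the kube spec
      ansible.builtin.command: >-
        systemd-escape --template podman-kube@.service
        /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
      register: __kube_unit_name  # stdout holds the escaped unit name
      changed_when: false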
Jul 07 20:12:23 managed-node1 platform-python[30442]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:23 managed-node1 platform-python[30638]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
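The two entries kept above show the deployment pattern for the kube spec: stat the target file first, then ensure /etc/containers/ansible-kubernetes.d exists with standard 0755 directory permissions before the spec is copied in (the ansible.legacy.stat at 20:12:24 below is the checksum pre-check that copy-style actions perform). A sketch of the two steps, reconstructed from the logged arguments (register names illustrative):

    - name: Check for an existing kube spec
      ansible.builtin.stat:
        path: /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
      register: __kube_file_stat

    - name: Ensure the ansible-kubernetes.d directory exists
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d
        state: directory
        owner: root
        group: "0"
        mode: "0755"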
Jul 07 20:12:23 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time\n
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time\n
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time\n
Jul 07 20:12:24 managed-node1 platform-python[30832]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\n
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time\n
Jul 07 20:12:24 managed-node1 platform-python[30948]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _original_basename=tmp_vaq3m9x recurse=False state=file path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\n
Jul 07 20:12:24 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:24 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time\n
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:25 managed-node1 platform-python[31147]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\n
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 42ms CPU time\n
Jul 07 20:12:25 managed-node1 systemd[1]: Created slice cgroup machine-libpod_pod_2cf8954f7399d3e7be6ffde9df97791abef0102d3bf1a2db611ca545eba8c723.slice.\n
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
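The podman_play invocation above is the step that actually deploys the kube spec; every parameter not shown below was logged as None (left at its default), and the password was masked as NOT_LOGGING_PARAMETER. A minimal sketch of an equivalent task:

    - name: Start the auth_test_1 kube play
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml
        state: started
        executable: podman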
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time\n
Jul 07 20:12:25 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:25 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time\n
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time\n
Jul 07 20:12:26 managed-node1 platform-python[31516]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\n
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time\n
Jul 07 20:12:26 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:26 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 30ms CPU time\n
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time\n
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:27 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:27 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time\n
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time\n
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time\n
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:28 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n
Jul 07 20:12:28 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 35ms CPU time\n
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time\n
Jul 07 20:12:29 managed-node1 platform-python[32231]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\n
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time\n
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n
Jul 07 20:12:29 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:29 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time\n
Jul 07 20:12:30 managed-node1 platform-python[32459]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\n
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n
Jul 07 20:12:30 managed-node1 platform-python[32558]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/systemd/auth_test_1_quadlet.container _original_basename=systemd.j2 recurse=False state=file path=/etc/containers/systemd/auth_test_1_quadlet.container force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\n
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
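The legacy.stat/legacy.file pair above is the tail end of deploying the quadlet unit file (note _original_basename=systemd.j2). A hedged sketch of an equivalent step, assuming the role templates the unit into /etc/containers/systemd and then reloads systemd so the quadlet generator picks it up (the template name is taken from _original_basename and may differ in the role):

    - name: Install the quadlet container unit
      ansible.builtin.template:
        src: systemd.j2    # assumed, from _original_basename in the log
        dest: /etc/containers/systemd/auth_test_1_quadlet.container
        owner: root
        group: "0"
        mode: "0644"

    - name: Reload systemd to run the quadlet generator
      ansible.builtin.systemd:
        daemon_reload: true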
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time\n
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:30 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:30 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time\n
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time\n
Jul 07 20:12:31 managed-node1 platform-python[32893]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\n
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:31 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:31 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 31ms CPU time\n
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:32 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:32 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 42ms CPU time\n
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time\n
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time\n
Jul 07 20:12:33 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:33 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 38ms CPU time\n
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 36ms CPU time\n
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time\n
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 34ms CPU time\n
Jul 07 20:12:34 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:34 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 33ms CPU time\n
Jul 07 20:12:35 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 32ms CPU time\n
Jul 07 20:12:35 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n
Jul 07 20:12:35 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 39ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
Jul 07 20:12:35 managed-node1 platform-python[33742]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:36 managed-node1 platform-python[34048]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:38 managed-node1 platform-python[34578]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:40 managed-node1 platform-python[34951]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:42 managed-node1 platform-python[35361]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:42 managed-node1 platform-python[35648]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:43 
managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 40ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:43 managed-node1 platform-python[35861]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcontainer container 4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.\n-- Subject: Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished start-up\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has finished starting up.\n-- \n-- The start-up result is done.\nJul 07 20:12:43 managed-node1 systemd[1]: Reloading.\nJul 07 20:12:43 managed-node1 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun shutting down.\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70.scope completed and consumed the indicated resources.\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit 
libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope: Consumed 37ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-4250f7d5c70ed8dba97347b300c3e447792d035d513f847cb1215995ff73d518.scope completed and consumed the indicated resources.\nJul 07 20:12:44 managed-node1 kernel: cni-podman1: port 1(vethd00911db) entered disabled state\nJul 07 20:12:44 managed-node1 kernel: device vethd00911db left promiscuous mode\nJul 07 20:12:44 managed-node1 kernel: cni-podman1: port 1(vethd00911db) entered disabled state\nJul 07 20:12:44 managed-node1 systemd[1]: run-netns-netns\\x2d31539c74\\x2d0dd9\\x2d75de\\x2d2758\\x2d32dd9b11acd9.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d31539c74\\x2d0dd9\\x2d75de\\x2d2758\\x2d32dd9b11acd9.mount has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-8daf9be5f5bc57d5fe264be69ec0a8f2b88417c2345e5169f830abbee6b87c70-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-8ab929d1d2d0c54129da2724994df9d3ecf5a006deddf7227971135d72c5c9b7-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-8ab929d1d2d0c54129da2724994df9d3ecf5a006deddf7227971135d72c5c9b7-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice.\n-- Subject: Unit machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice has finished shutting down.\nJul 07 20:12:44 managed-node1 systemd[1]: machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice: Consumed 8.855s CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2.slice completed and consumed the indicated resources.\nJul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-e92981e35d0e3220d4f6a642c4ebd01c543f98e36791b4fd0dad2ba868c2b8b2-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-e92981e35d0e3220d4f6a642c4ebd01c543f98e36791b4fd0dad2ba868c2b8b2-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: 
libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope: Consumed 34ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895.scope completed and consumed the indicated resources.\nJul 07 20:12:44 managed-node1 podman[35948]: Pods stopped:\nJul 07 20:12:44 managed-node1 podman[35948]: 32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2\nJul 07 20:12:44 managed-node1 podman[35948]: Pods removed:\nJul 07 20:12:44 managed-node1 podman[35948]: 32d18caf2d5b9dcf497a950d2ffee4ff15823f2643b9cc2a46127465c521afa2\nJul 07 20:12:44 managed-node1 podman[35948]: Secrets removed:\nJul 07 20:12:44 managed-node1 podman[35948]: Volumes removed:\nJul 07 20:12:44 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n-- Subject: Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished shutting down.\nJul 07 20:12:44 managed-node1 platform-python[36205]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay-6ff63d58f28e47f3ff7d69686fa0fa0d8b689b5f23097fa79213e92ebd3c1a82-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-6ff63d58f28e47f3ff7d69686fa0fa0d8b689b5f23097fa79213e92ebd3c1a82-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-17e498c9097d2e088cc0f4ba5abbf09a5a31932e8f18089fc5eeef4f21332895-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 platform-python[36330]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None 
password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:12:45 managed-node1 platform-python[36330]: ansible-containers.podman.podman_play version: 4.9.4-dev, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml\nJul 07 20:12:45 managed-node1 platform-python[36466]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:46 managed-node1 platform-python[36589]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:47 managed-node1 platform-python[36714]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:12:48 managed-node1 systemd[1]: Reloading.\nJul 07 20:12:48 managed-node1 platform-python[36869]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:49 managed-node1 platform-python[37117]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:51 managed-node1 platform-python[37366]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:52 managed-node1 platform-python[37495]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:55 managed-node1 platform-python[37620]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:57 managed-node1 platform-python[37745]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:58 managed-node1 platform-python[37870]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:59 managed-node1 platform-python[37995]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:00 managed-node1 platform-python[38119]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False 
daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:13:00 managed-node1 platform-python[38246]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:01 managed-node1 platform-python[38369]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:02 managed-node1 platform-python[38492]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:04 managed-node1 platform-python[38617]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:13:04 managed-node1 systemd[1]: Reloading.\nJul 07 20:13:04 managed-node1 platform-python[38772]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:05 managed-node1 platform-python[39020]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:08 managed-node1 platform-python[39269]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:09 managed-node1 platform-python[39398]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None\nJul 07 20:13:09 managed-node1 platform-python[39522]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:10 managed-node1 platform-python[39646]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:13:10 managed-node1 platform-python[39772]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None 
modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:11 managed-node1 platform-python[39895]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type \"volume\"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:11 managed-node1 platform-python[40026]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:12 managed-node1 systemd[1]: libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope has successfully entered the 'dead' state.\nJul 07 20:13:12 managed-node1 systemd[1]: libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope: Consumed 286ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope completed and consumed the indicated resources.\nJul 07 20:13:12 managed-node1 kernel: cni-podman0: port 1(veth68fe629e) entered disabled state\nJul 07 20:13:12 managed-node1 kernel: device veth68fe629e left promiscuous mode\nJul 07 20:13:12 managed-node1 kernel: cni-podman0: port 1(veth68fe629e) entered disabled state\nJul 07 20:13:12 managed-node1 systemd[1]: run-netns-netns\\x2d9a2a84ae\\x2dd2b7\\x2d43e4\\x2de6fc\\x2d789ee7470be2.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d9a2a84ae\\x2dd2b7\\x2d43e4\\x2de6fc\\x2d789ee7470be2.mount has successfully entered the 'dead' state.\nJul 07 20:13:12 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:13:12 managed-node1 systemd[1]: var-lib-containers-storage-overlay-9932edc33cbafb6e7ecfae77d0fb277900da93d8d1700e844f2e7a339147077c-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-9932edc33cbafb6e7ecfae77d0fb277900da93d8d1700e844f2e7a339147077c-merged.mount has successfully entered the 'dead' state.\nJul 07 20:13:12 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:13:12 managed-node1 systemd[1]: 
libpod-conmon-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-conmon-fd6e3c728187a2f7bf9816bdd10f067b96537d0cd0728b652629fd707a52323c.scope has successfully entered the 'dead' state.\nJul 07 20:13:12 managed-node1 platform-python[40256]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 3d862ddec4300f01d16f48f9bc70e1ab9060957c31772608c131707fa7e9530a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:12 managed-node1 platform-python[40386]: ansible-file Invoked with path=/tmp/lsr_g1udakmi_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:15 managed-node1 platform-python[40548]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:13:15 managed-node1 platform-python[40708]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:16 managed-node1 platform-python[40831]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:18 managed-node1 platform-python[41079]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:19 managed-node1 platform-python[41208]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:19 managed-node1 platform-python[41332]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:21 managed-node1 platform-python[41457]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None\nJul 07 20:13:22 managed-node1 platform-python[41580]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_eh60794g/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:22 managed-node1 platform-python[41704]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None 
generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:13:22 managed-node1 useradd[41709]: new group: name=user1, GID=1000\nJul 07 20:13:22 managed-node1 useradd[41709]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash\nJul 07 20:13:25 managed-node1 platform-python[41962]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:26 managed-node1 platform-python[42091]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None\nJul 07 20:13:26 managed-node1 platform-python[42215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:26 managed-node1 platform-python[42340]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:27 managed-node1 platform-python[42464]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:28 managed-node1 platform-python[42588]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:28 managed-node1 platform-python[42711]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:29 managed-node1 platform-python[42810]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933608.4442887-13818-203828107249767/source _original_basename=tmp2im13war follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:29 managed-node1 platform-python[42935]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:29 managed-node1 platform-python[43058]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf 
follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:30 managed-node1 platform-python[43157]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933609.6925502-13875-87176783089164/source _original_basename=tmpedt3m65n follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:30 managed-node1 platform-python[43282]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:31 managed-node1 platform-python[43405]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:31 managed-node1 platform-python[43504]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933610.9488542-13923-231091939259467/source _original_basename=tmpecztpwnu follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:31 managed-node1 platform-python[43629]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:32 managed-node1 platform-python[43752]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:32 managed-node1 platform-python[43875]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:33 managed-node1 platform-python[43974]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933612.5160282-13996-72093044894160/source _original_basename=tmpfiqb05ty follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:33 managed-node1 platform-python[44099]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:34 managed-node1 platform-python[44224]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:34 managed-node1 platform-python[44348]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:35 managed-node1 platform-python[44472]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:36 managed-node1 platform-python[44819]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:37 managed-node1 platform-python[44944]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:37 managed-node1 platform-python[45068]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:38 managed-node1 platform-python[45192]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:38 managed-node1 platform-python[45317]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:39 managed-node1 platform-python[45442]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:39 managed-node1 platform-python[45567]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:39 managed-node1 platform-python[45692]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:42 managed-node1 platform-python[45941]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:43 managed-node1 platform-python[46070]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:44 managed-node1 platform-python[46195]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:44 managed-node1 platform-python[46319]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:45 managed-node1 platform-python[46443]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:45 managed-node1 platform-python[46566]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:45 managed-node1 platform-python[46630]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=tmpv6awi9kz recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:46 managed-node1 platform-python[46753]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:46 managed-node1 platform-python[46876]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:46 managed-node1 platform-python[46940]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=tmpld1z9971 recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:47 managed-node1 platform-python[47063]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:47 managed-node1 platform-python[47186]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:48 managed-node1 platform-python[47250]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=tmpogtggds4 recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:48 managed-node1 platform-python[47373]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:48 managed-node1 platform-python[47496]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:49 managed-node1 platform-python[47621]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json\nJul 07 20:13:50 managed-node1 platform-python[47744]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:50 managed-node1 platform-python[47869]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:50 managed-node1 platform-python[47993]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:51 managed-node1 platform-python[48117]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:53 managed-node1 platform-python[48427]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:53 managed-node1 platform-python[48552]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None 
removes=None stdin=None\nJul 07 20:13:54 managed-node1 platform-python[48676]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:55 managed-node1 platform-python[48800]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:55 managed-node1 platform-python[48925]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:55 managed-node1 platform-python[49050]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:56 managed-node1 platform-python[49175]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:58 managed-node1 platform-python[49425]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:59 managed-node1 platform-python[49554]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:59 managed-node1 platform-python[49678]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:00 managed-node1 platform-python[49803]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:01 managed-node1 platform-python[49926]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:01 managed-node1 platform-python[50025]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933640.7999432-15017-174714386022238/source _original_basename=tmpwmkwc7_r follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:01 managed-node1 platform-python[50150]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None 
selevel=None setype=None attributes=None\nJul 07 20:14:02 managed-node1 platform-python[50273]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:02 managed-node1 platform-python[50372]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933641.9483154-15075-274864138293397/source _original_basename=tmpuqrumlre follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:02 managed-node1 platform-python[50497]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:03 managed-node1 platform-python[50620]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:03 managed-node1 platform-python[50721]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933643.0880578-15126-78971180479869/source _original_basename=tmp124ngalx follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:04 managed-node1 platform-python[50846]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:04 managed-node1 platform-python[50969]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:04 managed-node1 platform-python[51094]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:05 managed-node1 platform-python[51217]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:05 managed-node1 platform-python[51318]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933644.9727504-15206-80436667009190/source _original_basename=tmp5mucblfd follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None 
serole=None selevel=None setype=None attributes=None\nJul 07 20:14:06 managed-node1 platform-python[51443]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:07 managed-node1 platform-python[51568]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:09 managed-node1 platform-python[51917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:10 managed-node1 platform-python[52042]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:10 managed-node1 platform-python[52167]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:11 managed-node1 platform-python[52292]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:11 managed-node1 platform-python[52417]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:14 managed-node1 platform-python[52667]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:15 managed-node1 platform-python[52795]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:15 managed-node1 platform-python[52920]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:16 managed-node1 platform-python[53043]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:16 managed-node1 platform-python[53107]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=tmpxlpxk23w recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:17 managed-node1 platform-python[53230]: ansible-file Invoked 
with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:17 managed-node1 platform-python[53353]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:17 managed-node1 platform-python[53417]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=tmpggptjz7u recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:18 managed-node1 platform-python[53540]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:18 managed-node1 platform-python[53663]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:19 managed-node1 platform-python[53727]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=tmpplg2p99p recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:19 managed-node1 platform-python[53850]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:19 managed-node1 platform-python[53973]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:20 managed-node1 platform-python[54098]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:21 managed-node1 platform-python[54221]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:21 managed-node1 platform-python[54346]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S 
access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:23 managed-node1 platform-python[54656]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:24 managed-node1 platform-python[54781]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:24 managed-node1 platform-python[54906]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:25 managed-node1 platform-python[55031]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:25 managed-node1 platform-python[55156]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:25 managed-node1 platform-python[55281]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:26 managed-node1 platform-python[55404]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf
Jul 07 20:14:26 managed-node1 platform-python[55527]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf
Jul 07 20:14:27 managed-node1 platform-python[55650]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:27 managed-node1 platform-python[55773]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf
Jul 07 20:14:28 managed-node1 platform-python[55896]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf
Jul 07 20:14:31 managed-node1 platform-python[56144]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:32 managed-node1 platform-python[56273]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:32 managed-node1 platform-python[56398]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:33 managed-node1 platform-python[56521]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:33 managed-node1 platform-python[56622]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933673.0533648-16059-112177277073811/source _original_basename=tmpkc2w6i8h follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:34 managed-node1 platform-python[56747]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:34 managed-node1 platform-python[56870]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:34 managed-node1 platform-python[56934]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=tmpmv6x1kbl recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:35 managed-node1 platform-python[57057]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:35 managed-node1 platform-python[57180]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:14:36 managed-node1 platform-python[57244]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=tmpq0t44buq recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:36 managed-node1 platform-python[57367]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:36 managed-node1 platform-python[57490]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:37 managed-node1 platform-python[57615]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json
Jul 07 20:14:38 managed-node1 platform-python[57738]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf
Jul 07 20:14:38 managed-node1 platform-python[57861]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:38 managed-node1 platform-python[57984]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 platform-python[58107]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 platform-python[58230]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:39 managed-node1 platform-python[58353]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:40 managed-node1 platform-python[58476]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:40 managed-node1 platform-python[58599]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:40 managed-node1 platform-python[58722]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:41 managed-node1 platform-python[58845]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:41 managed-node1 platform-python[58968]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:41 managed-node1 platform-python[59091]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_eh60794g/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:42 managed-node1 platform-python[59215]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_eh60794g recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:14:44 managed-node1 platform-python[59377]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:14:44 managed-node1 platform-python[59504]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:45 managed-node1 platform-python[59627]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:47 managed-node1 platform-python[59875]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:48 managed-node1 platform-python[60004]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:14:49 managed-node1 platform-python[60128]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:52 managed-node1 platform-python[60292]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:14:55 managed-node1 platform-python[60452]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:55 managed-node1 platform-python[60575]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:14:58 managed-node1 platform-python[60823]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:14:59 managed-node1 platform-python[60952]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:14:59 managed-node1 platform-python[61076]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:04 managed-node1 platform-python[61240]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:15:05 managed-node1 platform-python[61400]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:05 managed-node1 platform-python[61523]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:08 managed-node1 platform-python[61771]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:09 managed-node1 platform-python[61900]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:15:10 managed-node1 platform-python[62024]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:12 managed-node1 platform-python[62149]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:13 managed-node1 platform-python[62274]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:14 managed-node1 platform-python[62397]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:15:14 managed-node1 platform-python[62496]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933714.1632168-17986-2851343888714/source dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:17 managed-node1 platform-python[62746]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:18 managed-node1 platform-python[62875]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:20 managed-node1 platform-python[63000]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:23 managed-node1 platform-python[63263]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:24 managed-node1 platform-python[63386]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:15:24 managed-node1 platform-python[63485]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933724.0105758-18331-153209654949797/source dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:27 managed-node1 platform-python[63735]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:28 managed-node1 platform-python[63864]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:30 managed-node1 platform-python[63989]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
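The two throwaway units above, nopull.container and bogus.container, are rendered from the test's systemd.j2 template into /etc/containers/systemd/, where the quadlet generator turns each *.container file into a systemd service at the next daemon-reload. A minimal sketch of how such a drop-in could be installed by hand; only the image name is taken from this log (the generator warning further down quotes it), everything else is an illustrative assumption, not the test's actual template:

    - name: Install a quadlet .container drop-in (sketch, not the test's template)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/bogus.container
        owner: root
        group: root
        mode: "0644"
        content: |
          [Container]
          # Short image name as quoted by the generator warning in this log;
          # quadlet warns because it is not a fully qualified reference.
          Image=this_is_a_bogus_image

    - name: Let quadlet regenerate units from the drop-ins
      ansible.builtin.systemd:
        daemon_reload: true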
Jul 07 20:15:32 managed-node1 platform-python[64114]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None
Jul 07 20:15:32 managed-node1 platform-python[64240]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:33 managed-node1 platform-python[64488]: ansible-ansible.legacy.command Invoked with _raw_params=set -x
 set -o pipefail
 exec 1>&2
 #podman volume rm --all
 #podman network prune -f
 podman volume ls
 podman network ls
 podman secret ls
 podman container ls
 podman pod ls
 podman images
 systemctl list-units | grep quadlet
 _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:33 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:33 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:15:36 managed-node1 platform-python[64807]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:37 managed-node1 platform-python[64936]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None
Jul 07 20:15:37 managed-node1 platform-python[65060]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:40 managed-node1 platform-python[65223]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Jul 07 20:15:41 managed-node1 platform-python[65383]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:42 managed-node1 platform-python[65506]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:42 managed-node1 platform-python[65630]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 07 20:15:45 managed-node1 platform-python[65754]: ansible-ansible.legacy.dnf Invoked with name=['certmonger'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 07 20:15:48 managed-node1 platform-python[65878]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:49 managed-node1 platform-python[66001]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:49 managed-node1 platform-python[66124]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:15:50 managed-node1 platform-python[66251]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=#
 # Ansible managed
 #
 # system_role:certificate
 booted=True provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None
Jul 07 20:15:50 managed-node1 certmonger[12419]: 2025-07-07 20:15:50 [12419] Wrote to /var/lib/certmonger/requests/20250708001550
Jul 07 20:15:50 managed-node1 certmonger[66267]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved.
Jul 07 20:15:50 managed-node1 certmonger[12419]: 2025-07-07 20:15:50 [12419] Wrote to /var/lib/certmonger/requests/20250708001550
Jul 07 20:15:51 managed-node1 platform-python[66389]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
Jul 07 20:15:51 managed-node1 platform-python[66512]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key
Jul 07 20:15:52 managed-node1 platform-python[66635]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt
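The certmonger traffic above is driven by the certificate role with a single self-signed request; the certificate_request invocation logged at 20:15:50 pins down the values. A sketch of the equivalent play input, assuming the documented fedora.linux_system_roles.certificate interface:

    - name: Issue the test certificate (sketch)
      hosts: managed-node1
      roles:
        - fedora.linux_system_roles.certificate
      vars:
        certificate_requests:
          - name: quadlet_demo      # lands in /etc/pki/tls/certs/quadlet_demo.crt
            dns: ['localhost']
            ca: self-sign

The test then slurps the resulting certificate and key, stops tracking the request, and deletes both files again, as the entries that follow show.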
Jul 07 20:15:52 managed-node1 platform-python[66758]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:52 managed-node1 certmonger[12419]: 2025-07-07 20:15:52 [12419] Wrote to /var/lib/certmonger/requests/20250708001550
Jul 07 20:15:53 managed-node1 platform-python[66882]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:53 managed-node1 platform-python[67005]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:53 managed-node1 platform-python[67128]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:15:54 managed-node1 platform-python[67251]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:54 managed-node1 platform-python[67374]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:15:57 managed-node1 platform-python[67622]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:15:58 managed-node1 platform-python[67751]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None
Jul 07 20:15:58 managed-node1 platform-python[67875]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:00 managed-node1 platform-python[68000]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:00 managed-node1 platform-python[68123]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:01 managed-node1 platform-python[68246]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:16:01 managed-node1 platform-python[68370]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Jul 07 20:16:04 managed-node1 platform-python[68494]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Jul 07 20:16:05 managed-node1 platform-python[68621]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Jul 07 20:16:05 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:05 managed-node1 systemd[1]: Starting firewalld - dynamic firewall daemon...
-- Subject: Unit firewalld.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit firewalld.service has begun starting up.
Jul 07 20:16:05 managed-node1 systemd[1]: Started firewalld - dynamic firewall daemon.
-- Subject: Unit firewalld.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit firewalld.service has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:16:06 managed-node1 firewalld[68657]: WARNING: AllowZoneDrifting is enabled. This is considered an insecure configuration option. It will be removed in a future release. Please consider disabling it now.
Jul 07 20:16:06 managed-node1 platform-python[68836]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Jul 07 20:16:07 managed-node1 platform-python[68959]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None
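The two firewall_lib calls above open the ports the demo app publishes (8000 for the front end fetched later with get_url, 9000 alongside it). One plausible role input matching these invocations, assuming the fedora.linux_system_roles.firewall interface:

    - name: Open the quadlet demo ports (sketch)
      hosts: managed-node1
      roles:
        - fedora.linux_system_roles.firewall
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
            permanent: true
            runtime: true
          - port: 9000/tcp
            state: enabled
            permanent: true
            runtime: true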
Jul 07 20:16:07 managed-node1 rsyslogd[1044]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ]
Jul 07 20:16:10 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:11 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:12 managed-node1 platform-python[69533]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:13 managed-node1 platform-python[69658]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:14 managed-node1 platform-python[69781]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:14 managed-node1 platform-python[69880]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933773.8054082-20225-128599699437087/source dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:15 managed-node1 platform-python[70005]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:15 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:15 managed-node1 quadlet-generator[68632]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:15 managed-node1 platform-python[70158]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:15 managed-node1 systemd[1]: Starting quadlet-demo-network.service...
-- Subject: Unit quadlet-demo-network.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo-network.service has begun starting up.
Jul 07 20:16:15 managed-node1 quadlet-demo-network[70165]: systemd-quadlet-demo
Jul 07 20:16:15 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:15 managed-node1 systemd[1]: Started quadlet-demo-network.service.
-- Subject: Unit quadlet-demo-network.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo-network.service has finished starting up.
-- 
-- The start-up result is done.
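quadlet-demo-network.service is a unit the quadlet generator synthesizes from quadlet-demo.network; starting it runs the network creation and prints the resulting name, systemd-quadlet-demo. The file content itself is not logged (content=NOT_LOGGING_PARAMETER), but the dnsmasq lines further down (gateway 192.168.30.1) suggest roughly this shape; treat the exact keys and the /24 subnet as assumptions:

    - name: Install the demo network definition (sketch)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.network
        mode: "0644"
        content: |
          [Network]
          Subnet=192.168.30.0/24
          Gateway=192.168.30.1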
Jul 07 20:16:16 managed-node1 platform-python[70321]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:17 managed-node1 platform-python[70446]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:18 managed-node1 platform-python[70569]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:18 managed-node1 platform-python[70668]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933777.8190536-20316-75804711088380/source dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:18 managed-node1 platform-python[70793]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:18 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:18 managed-node1 quadlet-generator[70014]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:19 managed-node1 platform-python[70946]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:19 managed-node1 systemd[1]: Starting quadlet-demo-mysql-volume.service...
-- Subject: Unit quadlet-demo-mysql-volume.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo-mysql-volume.service has begun starting up.
Jul 07 20:16:19 managed-node1 quadlet-demo-mysql-volume[70953]: systemd-quadlet-demo-mysql
Jul 07 20:16:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:19 managed-node1 systemd[1]: Started quadlet-demo-mysql-volume.service.
-- Subject: Unit quadlet-demo-mysql-volume.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo-mysql-volume.service has finished starting up.
-- 
-- The start-up result is done.
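The volume counterpart is even smaller: an empty [Volume] section is enough for quadlet to generate quadlet-demo-mysql-volume.service, which creates the volume and reports its generated name, systemd-quadlet-demo-mysql. A sketch under the same assumptions as the network example above:

    - name: Install the demo volume definition (sketch)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo-mysql.volume
        mode: "0644"
        content: |
          [Volume]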
Jul 07 20:16:20 managed-node1 platform-python[71083]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:21 managed-node1 platform-python[71208]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:28 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:28 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:16:29 managed-node1 platform-python[71547]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:29 managed-node1 platform-python[71670]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:29 managed-node1 platform-python[71769]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933789.2744262-20446-266850841937438/source _original_basename=tmp5n7_bm4y follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:30 managed-node1 platform-python[71894]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:30 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:30 managed-node1 quadlet-generator[70802]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:30 managed-node1 platform-python[72047]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:16:30 managed-node1 systemd[1]: Starting quadlet-demo-mysql.service...
-- Subject: Unit quadlet-demo-mysql.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo-mysql.service has begun starting up.
Jul 07 20:16:31 managed-node1 quadlet-generator[71903]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:31 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): vethf6faf8d5: link is not ready
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered blocking state
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered disabled state
Jul 07 20:16:31 managed-node1 kernel: device vethf6faf8d5 entered promiscuous mode
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered blocking state
Jul 07 20:16:31 managed-node1 kernel: cni-podman2: port 1(vethf6faf8d5) entered forwarding state
Jul 07 20:16:31 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethf6faf8d5: link becomes ready
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1014] manager: (cni-podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/8)
Jul 07 20:16:31 managed-node1 systemd-udevd[72123]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1024] manager: (vethf6faf8d5): new Veth device (/org/freedesktop/NetworkManager/Devices/9)
Jul 07 20:16:31 managed-node1 systemd-udevd[72122]: Using default interface naming scheme 'rhel-8.0'.
Jul 07 20:16:31 managed-node1 systemd-udevd[72122]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1148] device (vethf6faf8d5): carrier: link connected
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1150] device (cni-podman2): carrier: link connected
Jul 07 20:16:31 managed-node1 systemd-udevd[72123]: Could not generate persistent MAC address for vethf6faf8d5: No such file or directory
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1268] device (cni-podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1277] device (cni-podman2): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1283] device (cni-podman2): Activation: starting connection 'cni-podman2' (167ec242-14e6-424d-8b23-53c994e9c6df)
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1284] device (cni-podman2): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1286] device (cni-podman2): state change: prepare -> config (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1288] device (cni-podman2): state change: config -> ip-config (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 dbus-daemon[614]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=682 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1290] device (cni-podman2): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...
-- Subject: Unit NetworkManager-dispatcher.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit NetworkManager-dispatcher.service has begun starting up.
Jul 07 20:16:31 managed-node1 dbus-daemon[614]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Jul 07 20:16:31 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit NetworkManager-dispatcher.service has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1747] device (cni-podman2): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1749] device (cni-podman2): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')
Jul 07 20:16:31 managed-node1 NetworkManager[682]: [1751933791.1754] device (cni-podman2): Activation: successful, device activated.
Jul 07 20:16:31 managed-node1 dnsmasq[72191]: listening on cni-podman2(#8): 192.168.30.1
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: started, version 2.79 cachesize 150
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using local addresses only for domain dns.podman
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: reading /etc/resolv.conf
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using local addresses only for domain dns.podman
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using nameserver 10.29.169.13#53
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using nameserver 10.29.170.12#53
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: using nameserver 10.2.32.1#53
Jul 07 20:16:31 managed-node1 dnsmasq[72207]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 1 addresses
Jul 07 20:16:31 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.
-- Subject: Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.timer has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.timer has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 systemd[1]: Started quadlet-demo-mysql.service.
-- Subject: Unit quadlet-demo-mysql.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo-mysql.service has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.
-- Subject: Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:16:31 managed-node1 quadlet-demo-mysql[72054]: 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64
Jul 07 20:16:31 managed-node1 systemd[1]: 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has successfully entered the 'dead' state.
Jul 07 20:16:32 managed-node1 dnsmasq[72207]: listening on cni-podman2(#8): fe80::10f8:1dff:feca:224%cni-podman2
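quadlet-demo-mysql.container ties the earlier pieces together: the generated quadlet-demo-mysql.service joins the systemd-quadlet-demo network (the dnsmasq addnhosts count grows above), mounts the volume, and registers a podman healthcheck, which is what the 43ced9d0....timer / .service pair is. The unit content is not logged (content=NOT_LOGGING_PARAMETER), so the following is only a plausible shape; the image name and the health check command in particular are assumptions:

    - name: Install the mysql container unit (sketch; image and health check assumed)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo-mysql.container
        mode: "0644"
        content: |
          [Container]
          # Not visible in this log; any mysql-compatible image would do here.
          Image=quay.io/linux-system-roles/mysql:5.6
          Volume=quadlet-demo-mysql.volume:/var/lib/mysql
          Network=quadlet-demo.network
          # Produces the <container-id>.timer/.service healthcheck units seen above.
          HealthCmd=/bin/true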
Jul 07 20:16:32 managed-node1 platform-python[72438]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:33 managed-node1 platform-python[72590]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:33 managed-node1 platform-python[72713]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:34 managed-node1 platform-python[72812]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933793.6082838-20536-246540776008881/source dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:34 managed-node1 platform-python[72955]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:34 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:35 managed-node1 platform-python[73114]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:36 managed-node1 platform-python[73267]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:37 managed-node1 platform-python[73390]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:37 managed-node1 platform-python[73498]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933797.1321883-20621-109965661389418/source _original_basename=tmpklejpicd follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:38 managed-node1 platform-python[73623]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:16:38 managed-node1 systemd[1]: Reloading.
Jul 07 20:16:38 managed-node1 quadlet-generator[72970]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:16:39 managed-node1 platform-python[73783]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:16:40 managed-node1 platform-python[73908]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml
Jul 07 20:16:40 managed-node1 platform-python[74031]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:41 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 07 20:16:41 managed-node1 platform-python[74156]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:59 managed-node1 platform-python[74751]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:16:59 managed-node1 platform-python[74874]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True
Jul 07 20:16:59 managed-node1 platform-python[74973]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933819.1617088-20790-103013089228288/source dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:17:00 managed-node1 platform-python[75098]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None
Jul 07 20:17:00 managed-node1 systemd[1]: Reloading.
Jul 07 20:17:00 managed-node1 quadlet-generator[73632]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
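quadlet-demo.kube is the last drop-in: a [Kube] unit pointing at the Kubernetes YAML installed just before it (quadlet-demo.yml, with envoy-proxy-configmap.yml alongside). When quadlet-demo.service starts below, podman kube play creates the wp-pv-claim volume, the pod, and its containers. A sketch of what the .kube file plausibly contains; only the file names come from this log, and the PublishPort mappings are inferred from the firewall ports and the later fetch of https://localhost:8000:

    - name: Install the kube unit (sketch; port mappings inferred)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.kube
        mode: "0644"
        content: |
          [Kube]
          Yaml=quadlet-demo.yml
          ConfigMap=envoy-proxy-configmap.yml
          Network=quadlet-demo.network
          PublishPort=8000:8080
          PublishPort=9000:9901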
Jul 07 20:17:00 managed-node1 platform-python[75251]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None
Jul 07 20:17:00 managed-node1 systemd[1]: Starting quadlet-demo.service...
-- Subject: Unit quadlet-demo.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo.service has begun starting up.
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Pods stopped:
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Pods removed:
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Secrets removed:
Jul 07 20:17:00 managed-node1 quadlet-demo[75258]: Volumes removed:
Jul 07 20:17:01 managed-node1 systemd[1]: Created slice cgroup machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice.
-- Subject: Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container 6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.
-- Subject: Unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 NetworkManager[682]: [1751933821.2795] manager: (veth0d2aa8bf): new Veth device (/org/freedesktop/NetworkManager/Devices/10)
Jul 07 20:17:01 managed-node1 systemd-udevd[75344]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Jul 07 20:17:01 managed-node1 systemd-udevd[75344]: Could not generate persistent MAC address for veth0d2aa8bf: No such file or directory
Jul 07 20:17:01 managed-node1 quadlet-generator[75107]: Warning: bogus.container specifies the image "this_is_a_bogus_image" which not a fully qualified image name. This is not ideal for performance and security reasons. See the podman-pull manpage discussion of short-name-aliases.conf for details.
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): veth0d2aa8bf: link is not ready
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered blocking state
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered disabled state
Jul 07 20:17:01 managed-node1 kernel: device veth0d2aa8bf entered promiscuous mode
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready
Jul 07 20:17:01 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth0d2aa8bf: link becomes ready
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered blocking state
Jul 07 20:17:01 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered forwarding state
Jul 07 20:17:01 managed-node1 NetworkManager[682]: [1751933821.3000] device (veth0d2aa8bf): carrier: link connected
Jul 07 20:17:01 managed-node1 dnsmasq[72207]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 2 addresses
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.
-- Subject: Unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container 895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.
-- Subject: Unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started /usr/bin/podman healthcheck run 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.
-- Subject: Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 systemd[1]: Started libcontainer container a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.
-- Subject: Unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: Volumes:
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: wp-pv-claim
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: Pod:
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: 501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: Containers:
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: 895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9
Jul 07 20:17:01 managed-node1 quadlet-demo[75258]: a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6
Jul 07 20:17:01 managed-node1 systemd[1]: Started quadlet-demo.service.
-- Subject: Unit quadlet-demo.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- Unit quadlet-demo.service has finished starting up.
-- 
-- The start-up result is done.
Jul 07 20:17:02 managed-node1 systemd[1]: 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- 
-- The unit 43ced9d07da7437d147eff38f7da29113e9e1921737f3b0c4de1f7f5b0c13f64.service has successfully entered the 'dead' state.
Jul 07 20:17:02 managed-node1 platform-python[75742]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:02 managed-node1 platform-python[75934]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:03 managed-node1 platform-python[76092]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:03 managed-node1 platform-python[76230]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:04 managed-node1 platform-python[76361]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:04 managed-node1 platform-python[76487]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jul 07 20:17:05 managed-node1 platform-python[76614]: ansible-ansible.legacy.command Invoked with _raw_params=cat /run/out _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:06 managed-node1 platform-python[76738]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jul 07 20:17:06 
managed-node1 platform-python[76870]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:06 managed-node1 platform-python[77001]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:07 managed-node1 platform-python[77127]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:09 managed-node1 platform-python[77376]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:10 managed-node1 platform-python[77505]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:17:12 managed-node1 platform-python[77630]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None\nJul 07 20:17:15 managed-node1 platform-python[77754]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nJul 07 20:17:16 managed-node1 platform-python[77881]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nJul 07 20:17:16 managed-node1 platform-python[78008]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:17:17 managed-node1 platform-python[78131]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None 
set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:17:22 managed-node1 platform-python[78644]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:17:23 managed-node1 platform-python[78769]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:17:23 managed-node1 systemd[1]: Reloading.\nJul 07 20:17:23 managed-node1 systemd[1]: Stopping quadlet-demo.service...\n-- Subject: Unit quadlet-demo.service has begun shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit quadlet-demo.service has begun shutting down.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope: Consumed 36ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18.scope completed and consumed the indicated resources.\nJul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-6590bf67f59b024925ebe92e45650f39784ad9bf9527a4752fd7df5e77429d18-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-cc262e6d970681dd648fc289e4ce567fcb386d591ba9938e73ee237bbb02d14e-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-cc262e6d970681dd648fc289e4ce567fcb386d591ba9938e73ee237bbb02d14e-merged.mount has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope: Consumed 32ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b.scope completed and consumed the indicated resources.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The 
unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope: Consumed 128ms CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-a14813358ad3507375eed8b0e119a45baead9a1480fe35dbdab5e0bfa8c011e6.scope completed and consumed the indicated resources.\nJul 07 20:17:23 managed-node1 dnsmasq[72207]: read /run/containers/cni/dnsname/systemd-quadlet-demo/addnhosts - 1 addresses\nJul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-c29144925da4bc55c018f04bef611bd79cddabc79432ac8a4a5bac02cf32c71d-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-c29144925da4bc55c018f04bef611bd79cddabc79432ac8a4a5bac02cf32c71d-merged.mount has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope: Consumed 1.097s CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit libpod-895e1420c8b6c7cea6ce319e7c1d813547b8d58ac960cbf44c657f0a3cfbe0e9.scope completed and consumed the indicated resources.\nJul 07 20:17:23 managed-node1 quadlet-generator[78780]: Warning: bogus.container specifies the image \"this_is_a_bogus_image\" which not a fully qualified image name. This is not ideal for performance and security reasons. 
See the podman-pull manpage discussion of short-name-aliases.conf for details.\nJul 07 20:17:23 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered disabled state\nJul 07 20:17:23 managed-node1 kernel: device veth0d2aa8bf left promiscuous mode\nJul 07 20:17:23 managed-node1 kernel: cni-podman2: port 2(veth0d2aa8bf) entered disabled state\nJul 07 20:17:23 managed-node1 systemd[1]: run-netns-netns\\x2d21744e05\\x2dcfe3\\x2df219\\x2d733e\\x2d9e275b4aa4fe.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit run-netns-netns\\x2d21744e05\\x2dcfe3\\x2df219\\x2d733e\\x2d9e275b4aa4fe.mount has successfully entered the 'dead' state.\nJul 07 20:17:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-4cc4f3ebd45dd82a955fee4fc7ba69575eb6e40399169da0568ef4a19728535d-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-4cc4f3ebd45dd82a955fee4fc7ba69575eb6e40399169da0568ef4a19728535d-merged.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice.\n-- Subject: Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice has finished shutting down.\nJul 07 20:17:24 managed-node1 systemd[1]: machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice: Consumed 1.258s CPU time\n-- Subject: Resources consumed by unit runtime\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit machine-libpod_pod_501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372.slice completed and consumed the indicated resources.\nJul 07 20:17:24 managed-node1 quadlet-demo[78819]: Pods stopped:\nJul 07 20:17:24 managed-node1 quadlet-demo[78819]: 501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372\nJul 07 20:17:24 managed-node1 quadlet-demo[78819]: Pods removed:\nJul 07 20:17:24 managed-node1 quadlet-demo[78819]: 501c88c145059d5dbd74a5422acbaf3a1ae9da7be975c7ccbd98500190216372\nJul 07 20:17:24 managed-node1 quadlet-demo[78819]: Secrets removed:\nJul 07 20:17:24 managed-node1 quadlet-demo[78819]: Volumes removed:\nJul 07 20:17:24 managed-node1 systemd[1]: quadlet-demo.service: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit quadlet-demo.service has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 systemd[1]: Stopped quadlet-demo.service.\n-- Subject: Unit quadlet-demo.service has finished shutting down\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- Unit quadlet-demo.service has finished shutting down.\nJul 07 20:17:24 managed-node1 platform-python[79125]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay-2e7d397c3505d1186cb0eec8515ded4aa51320c6484af2e71fdb9a647a72ccec-merged.mount: Succeeded.\n-- Subject: Unit succeeded\n-- 
Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay-2e7d397c3505d1186cb0eec8515ded4aa51320c6484af2e71fdb9a647a72ccec-merged.mount has successfully entered the 'dead' state.\nJul 07 20:17:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b-userdata-shm.mount: Succeeded.\n-- Subject: Unit succeeded\n-- Defined-By: systemd\n-- Support: https://access.redhat.com/support\n-- \n-- The unit var-lib-containers-storage-overlay\\x2dcontainers-cb9417aec73250a882bcbeaea691ec75dc135d57e636ddd7978c781a6376812b-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:17:25 managed-node1 platform-python[79373]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2\n set -x\n set -o pipefail\n systemctl list-units --plain -l --all | grep quadlet || :\n systemctl list-unit-files --all | grep quadlet || :\n systemctl list-units --plain --failed -l --all | grep quadlet || :\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:17:26 managed-node1 platform-python[79503]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None",
"task_name": "Get journald",
"task_path": "/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:217"
}
]
SYSTEM ROLES ERRORS END v1
TASKS RECAP ********************************************************************
Monday 07 July 2025 20:17:26 -0400 (0:00:00.509) 0:01:46.445 ***********
===============================================================================
fedora.linux_system_roles.podman : Ensure container images are present -- 17.26s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 7.22s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 3.02s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 2.90s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:15
fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.86s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
fedora.linux_system_roles.firewall : Install firewalld ------------------ 2.85s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:51
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.73s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Start service ------------------------ 1.58s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.44s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Check web --------------------------------------------------------------- 1.29s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.27s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Gathering Facts --------------------------------------------------------- 1.24s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.24s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.19s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:143
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.09s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:74
fedora.linux_system_roles.podman : Start service ------------------------ 1.02s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
fedora.linux_system_roles.certificate : Remove files -------------------- 1.01s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:174
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 0.97s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:30
fedora.linux_system_roles.certificate : Ensure certificate requests ----- 0.97s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:86
fedora.linux_system_roles.podman : Reload systemctl --------------------- 0.84s
/tmp/collections-pTD/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87